jolt 0.9.342-py3-none-any.whl → 0.9.429-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jolt/__init__.py +47 -0
- jolt/cache.py +358 -159
- jolt/cli.py +71 -104
- jolt/config.py +14 -26
- jolt/filesystem.py +2 -2
- jolt/graph.py +56 -28
- jolt/influence.py +67 -2
- jolt/loader.py +150 -186
- jolt/log.py +12 -2
- jolt/manifest.py +0 -46
- jolt/options.py +35 -12
- jolt/pkgs/abseil.py +42 -0
- jolt/pkgs/asio.py +25 -0
- jolt/pkgs/autoconf.py +41 -0
- jolt/pkgs/automake.py +41 -0
- jolt/pkgs/b2.py +31 -0
- jolt/pkgs/boost.py +111 -0
- jolt/pkgs/boringssl.py +32 -0
- jolt/pkgs/busybox.py +39 -0
- jolt/pkgs/bzip2.py +43 -0
- jolt/pkgs/cares.py +29 -0
- jolt/pkgs/catch2.py +36 -0
- jolt/pkgs/cbindgen.py +17 -0
- jolt/pkgs/cista.py +19 -0
- jolt/pkgs/clang.py +44 -0
- jolt/pkgs/cli11.py +23 -0
- jolt/pkgs/cmake.py +48 -0
- jolt/pkgs/cpython.py +196 -0
- jolt/pkgs/crun.py +29 -0
- jolt/pkgs/curl.py +38 -0
- jolt/pkgs/dbus.py +18 -0
- jolt/pkgs/double_conversion.py +24 -0
- jolt/pkgs/fastfloat.py +21 -0
- jolt/pkgs/ffmpeg.py +28 -0
- jolt/pkgs/flatbuffers.py +29 -0
- jolt/pkgs/fmt.py +27 -0
- jolt/pkgs/fstree.py +20 -0
- jolt/pkgs/gflags.py +18 -0
- jolt/pkgs/glib.py +18 -0
- jolt/pkgs/glog.py +25 -0
- jolt/pkgs/glslang.py +21 -0
- jolt/pkgs/golang.py +16 -11
- jolt/pkgs/googlebenchmark.py +18 -0
- jolt/pkgs/googletest.py +46 -0
- jolt/pkgs/gperf.py +15 -0
- jolt/pkgs/grpc.py +73 -0
- jolt/pkgs/hdf5.py +19 -0
- jolt/pkgs/help2man.py +14 -0
- jolt/pkgs/inja.py +28 -0
- jolt/pkgs/jsoncpp.py +31 -0
- jolt/pkgs/libarchive.py +43 -0
- jolt/pkgs/libcap.py +44 -0
- jolt/pkgs/libdrm.py +44 -0
- jolt/pkgs/libedit.py +42 -0
- jolt/pkgs/libevent.py +31 -0
- jolt/pkgs/libexpat.py +27 -0
- jolt/pkgs/libfastjson.py +21 -0
- jolt/pkgs/libffi.py +16 -0
- jolt/pkgs/libglvnd.py +30 -0
- jolt/pkgs/libogg.py +28 -0
- jolt/pkgs/libpciaccess.py +18 -0
- jolt/pkgs/libseccomp.py +21 -0
- jolt/pkgs/libtirpc.py +24 -0
- jolt/pkgs/libtool.py +42 -0
- jolt/pkgs/libunwind.py +35 -0
- jolt/pkgs/libva.py +18 -0
- jolt/pkgs/libvorbis.py +33 -0
- jolt/pkgs/libxml2.py +35 -0
- jolt/pkgs/libxslt.py +17 -0
- jolt/pkgs/libyajl.py +16 -0
- jolt/pkgs/llvm.py +81 -0
- jolt/pkgs/lua.py +54 -0
- jolt/pkgs/lz4.py +26 -0
- jolt/pkgs/m4.py +14 -0
- jolt/pkgs/make.py +17 -0
- jolt/pkgs/mesa.py +81 -0
- jolt/pkgs/meson.py +17 -0
- jolt/pkgs/mstch.py +28 -0
- jolt/pkgs/mysql.py +60 -0
- jolt/pkgs/nasm.py +49 -0
- jolt/pkgs/ncurses.py +30 -0
- jolt/pkgs/ng_log.py +25 -0
- jolt/pkgs/ninja.py +45 -0
- jolt/pkgs/nlohmann_json.py +25 -0
- jolt/pkgs/nodejs.py +19 -11
- jolt/pkgs/opencv.py +24 -0
- jolt/pkgs/openjdk.py +26 -0
- jolt/pkgs/openssl.py +103 -0
- jolt/pkgs/paho.py +76 -0
- jolt/pkgs/patchelf.py +16 -0
- jolt/pkgs/perl.py +42 -0
- jolt/pkgs/pkgconfig.py +64 -0
- jolt/pkgs/poco.py +39 -0
- jolt/pkgs/protobuf.py +77 -0
- jolt/pkgs/pugixml.py +27 -0
- jolt/pkgs/python.py +19 -0
- jolt/pkgs/qt.py +35 -0
- jolt/pkgs/rapidjson.py +26 -0
- jolt/pkgs/rapidyaml.py +28 -0
- jolt/pkgs/re2.py +30 -0
- jolt/pkgs/re2c.py +17 -0
- jolt/pkgs/readline.py +15 -0
- jolt/pkgs/rust.py +41 -0
- jolt/pkgs/sdl.py +28 -0
- jolt/pkgs/simdjson.py +27 -0
- jolt/pkgs/soci.py +46 -0
- jolt/pkgs/spdlog.py +29 -0
- jolt/pkgs/spirv_llvm.py +21 -0
- jolt/pkgs/spirv_tools.py +24 -0
- jolt/pkgs/sqlite.py +83 -0
- jolt/pkgs/ssl.py +12 -0
- jolt/pkgs/texinfo.py +15 -0
- jolt/pkgs/tomlplusplus.py +22 -0
- jolt/pkgs/wayland.py +26 -0
- jolt/pkgs/x11.py +58 -0
- jolt/pkgs/xerces_c.py +20 -0
- jolt/pkgs/xorg.py +360 -0
- jolt/pkgs/xz.py +29 -0
- jolt/pkgs/yamlcpp.py +30 -0
- jolt/pkgs/zeromq.py +47 -0
- jolt/pkgs/zlib.py +69 -0
- jolt/pkgs/zstd.py +33 -0
- jolt/plugins/autotools.py +66 -0
- jolt/plugins/cmake.py +74 -6
- jolt/plugins/conan.py +238 -0
- jolt/plugins/cxxinfo.py +7 -0
- jolt/plugins/docker.py +3 -3
- jolt/plugins/environ.py +11 -0
- jolt/plugins/fetch.py +141 -0
- jolt/plugins/gdb.py +10 -6
- jolt/plugins/git.py +60 -11
- jolt/plugins/libtool.py +63 -0
- jolt/plugins/linux.py +990 -0
- jolt/plugins/meson.py +61 -0
- jolt/plugins/ninja-compdb.py +11 -7
- jolt/plugins/ninja.py +245 -26
- jolt/plugins/paths.py +11 -1
- jolt/plugins/pkgconfig.py +219 -0
- jolt/plugins/podman.py +15 -41
- jolt/plugins/python.py +137 -0
- jolt/plugins/rust.py +25 -0
- jolt/plugins/scheduler.py +18 -14
- jolt/plugins/selfdeploy/setup.py +2 -1
- jolt/plugins/selfdeploy.py +21 -30
- jolt/plugins/strings.py +19 -10
- jolt/scheduler.py +428 -138
- jolt/tasks.py +159 -7
- jolt/tools.py +105 -51
- jolt/utils.py +16 -1
- jolt/version.py +1 -1
- {jolt-0.9.342.dist-info → jolt-0.9.429.dist-info}/METADATA +64 -9
- jolt-0.9.429.dist-info/RECORD +207 -0
- {jolt-0.9.342.dist-info → jolt-0.9.429.dist-info}/WHEEL +1 -1
- jolt/plugins/debian.py +0 -338
- jolt/plugins/repo.py +0 -253
- jolt-0.9.342.dist-info/RECORD +0 -93
- {jolt-0.9.342.dist-info → jolt-0.9.429.dist-info}/entry_points.txt +0 -0
- {jolt-0.9.342.dist-info → jolt-0.9.429.dist-info}/top_level.txt +0 -0
jolt/cli.py
CHANGED
@@ -18,13 +18,12 @@ from jolt import log
 from jolt import __version__
 from jolt.log import logfile
 from jolt import config
-from jolt.loader import JoltLoader
+from jolt.loader import JoltLoader, import_workspace
 from jolt import tools
 from jolt import utils
 from jolt.influence import HashInfluenceRegistry
 from jolt.options import JoltOptions
 from jolt import hooks
-from jolt.manifest import JoltManifest
 from jolt.error import JoltError
 from jolt.error import raise_error
 from jolt.error import raise_error_if
@@ -92,6 +91,7 @@ class PluginGroup(click.Group):
               help="Add salt as task influence.")
 @click.option("-g", "--debug", is_flag=True, default=False, hidden=True,
               help="Start debug shell before executing task.")
+@click.option("-m", "--mute", is_flag=True, help="Display task log only if it fails.")
 @click.option("-n", "--network", is_flag=True, default=False, hidden=True,
               help="Build on network.")
 @click.option("-l", "--local", is_flag=True, default=False, hidden=True,
@@ -103,7 +103,7 @@ class PluginGroup(click.Group):
 @click.option("-h", "--help", is_flag=True, help="Show this message and exit.")
 @click.pass_context
 def cli(ctx, verbose, config_file, debugger, profile,
-        force, salt, debug, network, local, keep_going, jobs, help, machine_interface, chdir):
+        force, salt, debug, mute, network, local, keep_going, jobs, help, machine_interface, chdir):
     """
     A task execution tool.
 
@@ -156,23 +156,9 @@ def cli(ctx, verbose, config_file, debugger, profile,
         print(ctx.get_help())
         sys.exit(0)
 
-
-    utils.call_and_catch(manifest.parse)
-    manifest.process_import()
-    ctx.obj["manifest"] = manifest
-
-    if manifest.version:
-        from jolt.version_utils import requirement, version
-        req = requirement(manifest.version)
-        ver = version(__version__)
-        raise_error_if(not req.satisfied(ver),
-                       "This project requires Jolt version {} (running {})",
-                       req, __version__)
-
+    registry = TaskRegistry.get()
     loader = JoltLoader.get()
-
-    for cls in tasks:
-        TaskRegistry.get().add_task_class(cls)
+    loader.load(registry)
 
     if ctx.invoked_subcommand in ["build", "clean"] and loader.joltdir:
         ctx.obj["workspace_lock"] = utils.LockFile(
@@ -185,8 +171,8 @@
     if ctx.invoked_subcommand is None:
         task = config.get("jolt", "default", "default")
         taskname, _ = utils.parse_task_name(task)
-        if
-            ctx.invoke(build, task=[task], force=force, salt=salt, debug=debug,
+        if registry.get_task_class(taskname) is not None:
+            ctx.invoke(build, task=[task], force=force, salt=salt, debug=debug, mute=mute,
                        network=network, local=local, keep_going=keep_going, jobs=jobs)
         else:
             print(cli.get_help(ctx))
@@ -194,11 +180,8 @@ def cli(ctx, verbose, config_file, debugger, profile,
 
 
 def _autocomplete_tasks(ctx, args, incomplete):
-
-
-    manifest.process_import()
-
-    tasks = JoltLoader.get().load()
+    loader = JoltLoader.get()
+    tasks = loader.load()
     tasks = [task.name for task in tasks if task.name.startswith(incomplete or '')]
     return sorted(tasks)
 
@@ -234,12 +217,13 @@ def _autocomplete_tasks(ctx, args, incomplete):
               help="Don't prune cached artifacts from the build graph. This option can be used to populate the local cache with remotely cached dependency artifacts.")
 @click.option("--worker", is_flag=True, default=False,
               help="Run with the worker build strategy", hidden=True)
+@click.option("--environ", type=click.Path(), help="Import build environment from protobuf", hidden=True)
 @click.pass_context
 @hooks.cli_build
 def build(ctx, task, network, keep_going, default, local,
           no_download, no_download_persistent, no_upload, download, upload, worker, force,
           salt, copy, debug, result, jobs, no_prune, verbose,
-          mute):
+          mute, environ):
     """
     Build task artifact.
 
@@ -271,6 +255,7 @@
     are removed before execution starts.
 
     """
+
     raise_error_if(network and local,
                    "The -n and -l flags are mutually exclusive")
 
@@ -322,6 +307,33 @@
     if keep_going:
         config.set_keep_going(True)
 
+    # Import build environment from protobuf if provided
+    buildenv = None
+    if environ:
+        with open(environ, "rb") as f:
+            from jolt import common_pb2 as common_pb
+            buildenv = common_pb.BuildEnvironment()
+            try:
+                buildenv.ParseFromString(f.read())
+            except Exception as e:
+                raise_error("Failed to parse build environment protobuf: {}", e)
+
+        # Import log level
+        log.set_level_pb(buildenv.loglevel)
+
+        # Import workspace
+        import_workspace(buildenv)
+
+        # Import configuration snippet
+        config.import_config(buildenv.config)
+
+        # Import configuration parameters (-c params.key)
+        config.import_params({param.key: param.value for param in buildenv.parameters})
+
+        # Import default parameters (-d taskname:param=value)
+        default = utils.as_list(default)
+        default += buildenv.task_default_parameters
+
     options = JoltOptions(
         network=network,
         local=local,
@@ -356,26 +368,20 @@
     for params in default:
         registry.set_default_parameters(params)
 
-    manifest = ctx.obj["manifest"]
-
-    for mb in manifest.builds:
-        for mt in mb.tasks:
-            task.append(mt.name)
-        for mt in mb.defaults:
-            registry.set_default_parameters(mt.name)
-
-    if force:
-        for goal in task:
-            registry.get_task(goal, manifest=manifest).taint = uuid.uuid4()
-
     log.info("Started: {}", datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
 
-    gb = graph.GraphBuilder(registry, acache,
+    gb = graph.GraphBuilder(registry, acache, options, progress=True, buildenv=buildenv)
     dag = gb.build(task)
 
+    # If asked to force rebuild, taint all goal tasks
+    if force:
+        for goal in dag.goals:
+            goal.get_extended_task().taint()
+
     # Collect information about artifact presence before starting prune or build
     acache.precheck(dag.persistent_artifacts, remote=not local)
 
+    # Prune the graph to remove tasks that are already available locally or remotely
     if not no_prune:
         gp = graph.GraphPruner(acache, strategy)
         dag = gp.prune(dag)
@@ -384,7 +390,7 @@
     goal_task_duration = 0
 
     session = executors.create_session(dag) if options.network else {}
-    queue = scheduler.TaskQueue(
+    queue = scheduler.TaskQueue()
 
     try:
         if not dag.has_tasks():
@@ -398,16 +404,20 @@
             debug=debug)
 
         with progress:
+            in_progress = set()
+
             while dag.has_tasks() or not queue.empty():
                 # Find all tasks ready to be executed
-                leafs = dag.select(lambda graph, task: task.is_ready())
+                leafs = dag.select(lambda graph, task: task.is_ready() and task not in in_progress)
 
                 # Order the tasks by their weights to improve build times
                 leafs.sort(key=lambda x: x.weight)
 
                 while leafs:
                     task = leafs.pop()
-
+                    executor = strategy.create_executor(session, task)
+                    queue.submit(executor)
+                    in_progress.add(task)
 
                 task, error = queue.wait()
 
@@ -430,6 +440,7 @@
 
                 if not keep_going and error is not None:
                     queue.abort()
+                    executors.shutdown()
                     task.raise_for_status()
                     raise error
 
@@ -448,6 +459,7 @@
         log.warning("Interrupted by user")
         try:
            queue.abort()
+           executors.shutdown()
           sys.exit(1)
        except KeyboardInterrupt:
            print()
@@ -502,7 +514,7 @@ def clean(ctx, task, deps, expired):
     if task:
         task = [utils.stable_task_name(t) for t in task]
         registry = TaskRegistry.get()
-        dag = graph.GraphBuilder(registry, acache
+        dag = graph.GraphBuilder(registry, acache).build(task)
         if deps:
             tasks = dag.tasks
         else:
@@ -649,7 +661,7 @@ def display(ctx, task, reverse=None, show_cache=False, prune=False):
     registry = TaskRegistry.get()
     options = JoltOptions()
     acache = cache.ArtifactCache.get(options)
-    gb = graph.GraphBuilder(registry, acache
+    gb = graph.GraphBuilder(registry, acache)
     dag = gb.build(task, influence=show_cache)
 
     if reverse:
@@ -746,15 +758,14 @@ def download(ctx, task, deps, copy, copy_all):
     if copy_all:
         deps = True
 
-    manifest = ctx.obj["manifest"]
     options = JoltOptions()
     acache = cache.ArtifactCache.get(options)
     hooks.TaskHookRegistry.get(options)
     executors = scheduler.ExecutorRegistry.get(options)
     registry = TaskRegistry.get()
     strategy = scheduler.DownloadStrategy(executors, acache)
-    queue = scheduler.TaskQueue(
-    gb = graph.GraphBuilder(registry, acache,
+    queue = scheduler.TaskQueue()
+    gb = graph.GraphBuilder(registry, acache, options, progress=True)
     dag = gb.build(task)
 
     if not deps:
@@ -767,12 +778,16 @@
 
     try:
         with log.progress("Progress", dag.number_of_tasks(), " tasks", estimates=False, debug=False) as p:
+            in_progress = set()
+
             while dag.has_tasks() or not queue.empty():
-                leafs = dag.select(lambda graph, task: task.is_ready())
+                leafs = dag.select(lambda graph, task: task.is_ready() and task not in in_progress)
 
                 while leafs:
                     task = leafs.pop()
-
+                    executor = strategy.create_executor({}, task)
+                    queue.submit(executor)
+                    in_progress.add(task)
 
                 task, error = queue.wait()
                 p.update(1)
@@ -795,6 +810,7 @@
         log.warning("Interrupted by user")
         try:
             queue.abort()
+            executors.shutdown()
             sys.exit(1)
         except KeyboardInterrupt:
             print()
@@ -809,54 +825,6 @@ def download(ctx, task, deps, copy, copy_all):
     queue.shutdown()
 
 
-@cli.command(hidden=True)
-@click.argument("task", type=str, nargs=-1, required=True)
-@click.option("-r", "--remove", is_flag=True, help="Remove tasks from existing manifest.")
-@click.option("-d", "--default", type=str, multiple=True, help="Override default parameter values.")
-@click.option("-o", "--output", type=str, default="default.joltxmanifest", help="Manifest filename.")
-@click.pass_context
-def freeze(ctx, task, default, output, remove):
-    """
-    Freeze the identity of a task.
-
-    <WIP>
-    """
-    manifest = ctx.obj["manifest"]
-
-    options = JoltOptions(default=default)
-    acache = cache.ArtifactCache.get(options)
-    scheduler.ExecutorRegistry.get(options)
-    registry = TaskRegistry.get()
-
-    for params in default:
-        registry.set_default_parameters(params)
-
-    gb = graph.GraphBuilder(registry, acache, manifest)
-    dag = gb.build(task)
-
-    available, missing = acache.availability(dag.persistent_artifacts)
-
-    for artifact in missing:
-        raise_task_error_if(
-            not remove, artifact.get_task(),
-            "Task artifact is not available in any cache, build it first")
-
-    for task in dag.tasks:
-        if task.is_resource() or not task.is_cacheable():
-            continue
-        manifest_task = manifest.find_task(task)
-        if remove and manifest_task:
-            manifest.remove_task(manifest_task)
-            continue
-        if not remove:
-            if not manifest_task:
-                manifest_task = manifest.create_task()
-            manifest_task.name = task.qualified_name
-            manifest_task.identity = task.identity
-
-    manifest.write(fs.path.join(JoltLoader.get().joltdir, output))
-
-
 @cli.command(name="list")
 @click.argument("task", type=str, nargs=-1, required=False, shell_complete=_autocomplete_tasks)
 @click.option("-a", "--all", is_flag=True, help="List all direct and indirect dependencies of TASK.")
@@ -892,7 +860,7 @@ def _list(ctx, task=None, all=False, reverse=None):
     reverse = [utils.stable_task_name(t) for t in utils.as_list(reverse or [])]
 
     try:
-        dag = graph.GraphBuilder(registry, acache
+        dag = graph.GraphBuilder(registry, acache).build(task, influence=False)
     except JoltError as e:
         raise e
     except Exception:
@@ -992,9 +960,8 @@ def inspect(ctx, task, influence=False, artifact=False, salt=None):
 
     print()
     print(" Requirements")
-    manifest = ctx.obj["manifest"]
     try:
-        task = task_registry.get_task(task_name
+        task = task_registry.get_task(task_name)
         for req in sorted(utils.as_list(utils.call_or_return(task, task.requires))):
             print(" {0}".format(task.tools.expand(req)))
         if not task.requires:
@@ -1016,7 +983,7 @@
     if artifact:
         options = JoltOptions(salt=salt)
         acache = cache.ArtifactCache.get()
-        builder = graph.GraphBuilder(task_registry, acache,
+        builder = graph.GraphBuilder(task_registry, acache, options)
         dag = builder.build([task.qualified_name])
         tasks = dag.select(lambda graph, node: node.task is task)
         assert len(tasks) == 1, "graph produced multiple tasks, one expected"
@@ -1080,7 +1047,7 @@ def _export(ctx, task):
     executors = scheduler.ExecutorRegistry.get()
     strategy = scheduler.LocalStrategy(executors, acache)
 
-    dag = graph.GraphBuilder(registry, acache
+    dag = graph.GraphBuilder(registry, acache)
     dag = dag.build(task)
 
     gp = graph.GraphPruner(acache, strategy)
@@ -1110,7 +1077,7 @@
     for task in context.tasks:
        for artifact in task.artifacts:
            raise_task_error_if(
-               artifact.is_temporary(), task,
+               not task.is_resource() and artifact.is_temporary(), task,
                "Task artifact not found in local cache, build it first")
 
    visitor = Export()
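Note on the scheduling change above: both the build and download loops now track submitted tasks in an in_progress set, so a task that is ready but already handed to an executor is not selected and submitted again on the next iteration. A self-contained toy illustrating the guard; the deque and the strings stand in for Jolt's TaskQueue and task proxies, so this is a sketch of the pattern rather than Jolt's actual classes:

from collections import deque

ready = ["compile", "test", "package"]   # tasks whose dependencies are satisfied
queue = deque()                          # submitted work, completed in FIFO order here
in_progress = set()

while ready or queue:
    # Select only ready tasks that have not been submitted yet; without the
    # guard, "compile" would be resubmitted on every pass while it is queued.
    for task in [t for t in ready if t not in in_progress]:
        queue.append(task)
        in_progress.add(task)

    finished = queue.popleft()           # stands in for queue.wait()
    ready.remove(finished)
    print("finished:", finished)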
jolt/config.py
CHANGED
@@ -7,7 +7,6 @@ from jolt import common_pb2 as common_pb
 from jolt import filesystem as fs
 from jolt import utils
 from jolt.error import raise_error_if
-from jolt.manifest import ManifestExtension, ManifestExtensionRegistry
 
 
 _workdir = os.getcwd()
@@ -318,39 +317,28 @@ def split(string):
     return section, key
 
 
-
-
-
-
-
-        p = manifest.create_parameter()
-        p.key = "config." + key
-        p.value = value
-
-    def import_manifest(self, manifest):
-        if manifest.config:
-            _manifest.read_string(manifest.config)
-            from jolt.loader import JoltLoader
-            JoltLoader.get().load_plugins()
-
-        for param in manifest.parameters:
-            if param.key.startswith("config."):
-                set("params", param.key.split(".", 1)[1], param.value)
-
-    def import_protobuf(self, pb):
-        self.import_manifest(pb)
+def import_config(snippet: str):
+    """ Apply extra configuration for the worker, provided by the client. """
+    _manifest.read_string(snippet)
+    from jolt.loader import JoltLoader
+    JoltLoader.get().load_plugins()
 
 
 def export_config():
+    """ Get extra configuration for the worker. """
     return get("network", "config", "", expand=False)
 
 
+def import_params(params: dict):
+    """ Apply user-defined parameters (-c params.key=value). """
+    for key, value in params.items():
+        if key.startswith("config."):
+            set("params", key.split(".", 1)[1], value)
+
+
 def export_params():
+    """ Get user-defined parameters (-c params.key=value). """
     parameters = []
     for key, value in options("params"):
         parameters.append(common_pb.Property(key="config." + key, value=value))
     return parameters
-
-
-# High priority so that plugins are loaded before resources are acquired.
-ManifestExtensionRegistry.add(ConfigExtension(), -10)
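The ManifestExtension-based configuration import is replaced by the plain module functions import_config and import_params shown above, which a worker applies from the build environment sent by the client. A small, self-contained sketch of the key filtering that import_params performs; the store callback below stands in for jolt.config.set(section, key, value) and is only illustrative:

def import_params(params, store):
    # Only keys with the "config." prefix are treated as user-defined parameters.
    for key, value in params.items():
        if key.startswith("config."):
            store("params", key.split(".", 1)[1], value)

applied = {}
import_params({"config.jobs": "8", "unrelated": "ignored"},
              lambda section, key, value: applied.__setitem__((section, key), value))
print(applied)  # {('params', 'jobs'): '8'}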
jolt/filesystem.py
CHANGED
@@ -88,10 +88,10 @@ def move(src, dst):
 
 def onerror_warning(func, path, exc_info):
     from jolt import log
-    if
+    if hasattr(exc_info[1], "strerror"):
         msg = exc_info[1].strerror
     else:
-        msg = "Reason unknown"
+        msg = "Reason unknown: " + str(exc_info[1])
     if os.path.exists(path):
         log.warning("Could not remove file or directory: {} ({})", path, msg)
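The fallback message in onerror_warning now includes the exception text instead of a bare "Reason unknown". A handler with this (func, path, exc_info) signature matches the shape expected by shutil.rmtree's onerror callback; a minimal, self-contained version using print in place of jolt.log:

import os
import shutil
import tempfile

def onerror_warning(func, path, exc_info):
    # exc_info is the (type, value, traceback) tuple passed by shutil.rmtree.
    if hasattr(exc_info[1], "strerror"):
        msg = exc_info[1].strerror
    else:
        msg = "Reason unknown: " + str(exc_info[1])
    if os.path.exists(path):
        print("Could not remove file or directory: {} ({})".format(path, msg))

# Removal failures are reported through the handler instead of being raised.
workdir = tempfile.mkdtemp()
shutil.rmtree(workdir, onerror=onerror_warning)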
|
jolt/graph.py
CHANGED
@@ -216,7 +216,7 @@ class TaskProxy(object):
         for c in self.children:
             if c.is_resource() or c.is_alias():
                 continue
-            if not c.is_available_locally():
+            if not c.is_available_locally(persistent_only=True):
                 return False
         return True
 
@@ -335,16 +335,32 @@
         self._download = False
 
     def download(self, force=False, session_only=False, persistent_only=False):
+        """
+        Downloads all artifacts of this task.
+
+        If the task is not downloadable, the method returns True. Failure to
+        download persistent artifacts is considered a failure, and the method
+        returns False. Session artifacts are not required to be downloaded.
+
+        :param force: Force download even if the artifacts are already available.
+        :param session_only: Download only session artifacts.
+        :param persistent_only: Download only persistent artifacts.
+
+        """
         if not force and not self.is_downloadable():
             return True
+        success = True
         artifacts = self._artifacts
-
-
-
-
-
-
-
+        artifacts_session = list(filter(lambda a: a.is_session(), artifacts))
+        artifacts_persistent = list(filter(lambda a: not a.is_session(), artifacts))
+        download_all = not session_only and not persistent_only
+        if session_only or download_all:
+            for artifact in artifacts_session:
+                if not self.cache.download(artifact, force=force):
+                    self.warning("Failed to download session artifact: {}", artifact.identity)
+        if persistent_only or download_all:
+            success = all([self.cache.download(artifact, force=force) for artifact in artifacts_persistent])
+        return success
 
     def upload(self, force=False, locked=False, session_only=False, persistent_only=False, artifacts=None):
         artifacts = artifacts or self._artifacts
@@ -408,9 +424,8 @@
         self._owner = owner
         self.task.exported_name = f"{self.short_qualified_name}@@{owner.short_qualified_name}"
 
-    def finalize(self, dag
+    def finalize(self, dag):
         log.debug("Finalizing: " + self.short_qualified_name)
-        self.manifest = manifest
 
         # Find all direct and transitive dependencies
         self.ancestors = set()
@@ -426,9 +441,10 @@
             self.children.extend(n.children)
             n.ancestors.add(self)
 
-        # Exclude transitive alias and resources dependencies
+        # Exclude transitive alias and resources dependencies.
+        # Workspace resources are included as they may be required by its dependencies.
         self.children = list(
-            filter(lambda n: not n.is_alias() and
+            filter(lambda n: dag.are_neighbors(self, n) or (not n.is_alias() and not n.is_resource()),
                    utils.unique_list(self.children)))
 
         # Prepare workspace resources for this task so that influence can be calculated
@@ -451,11 +467,23 @@
         self._artifacts.extend(self.task._artifacts(self.cache, self))
 
     def taint(self, salt=None):
-        self.task.taint =
+        self.task.taint = salt or uuid.uuid4()
         if salt is None:
             # Only recalculate identity when build is forced, not when salted
             self.identity = None
             self.identity
+        # Recreate artifacts
+        self._artifacts = []
+        self.finalize_artifacts()
+
+        # If this is an alias, taint all children
+        if self.is_alias():
+            for child in self.children:
+                child.taint()
+
+        # Taint all extensions
+        for extension in self.extensions:
+            extension.taint()
 
     def queued(self, remote=True):
         self.task.verbose("Task queued " + self.log_name)
@@ -716,8 +744,7 @@
         log_prefix = False
 
         # Collect list of resource dependencies
-        resource_deps = [child for child in self.children if child.is_resource()
-        resource_deps_ws = [child for child in self.children if child.is_workspace_resource()]
+        resource_deps = [child for child in self.children if child.is_resource()]
 
         if self.options.worker:
             # Exclude local resources when running as worker. They are already acquired by the client.
@@ -732,8 +759,11 @@
         acquired = []
         try:
             # Acquire resource dependencies in reverse order.
-            for resource in reversed(resource_deps
-
+            for resource in reversed(resource_deps):
+                # Always discard resource artifacts before acquiring the resource.
+                # They should not exist in the cache when the resource is acquired,
+                # but may exist if the resource was previously acquired by an interrupted build.
+                with resource.lock_artifacts(discard=True) if not resource.is_workspace_resource() else nullcontext():
                     resource.deps = self.cache.get_context(resource)
                     exitstack.enter_context(resource.deps)
 
@@ -763,7 +793,6 @@
             self._run_task(env, force_upload, force_build)
 
     def _run_task(self, env, force_upload=False, force_build=False):
-        cache = env.cache
         queue = env.queue
 
         with self.tools:
@@ -777,7 +806,7 @@
                 self.skipped()
                 return
 
-            available_remotely = cache.download_enabled() and self.is_available_remotely()
+            available_remotely = self.cache.download_enabled() and self.is_available_remotely()
             if not available_locally and available_remotely:
                 available_locally = self.download()
 
@@ -796,7 +825,7 @@
             upload_session_artifacts = False
 
             try:
-                context = cache.get_context(self)
+                context = self.cache.get_context(self)
                 exitstack.enter_context(context)
 
                 self.running_execution()
@@ -865,7 +894,7 @@
                 if force_upload or force_build or not available_remotely:
                     raise_task_error_if(
                         not self.upload(force=force_upload, locked=False, persistent_only=True) \
-                        and cache.upload_enabled(),
+                        and self.cache.upload_enabled(),
                         self, "Failed to upload task artifact")
 
             finally:
@@ -873,14 +902,14 @@
                 raise_task_error_if(
                     upload_session_artifacts \
                     and not self.upload(force=force_upload, locked=False, session_only=True, artifacts=upload_session_artifacts) \
-                    and cache.upload_enabled(),
+                    and self.cache.upload_enabled(),
                     self, "Failed to upload session artifact")
 
             elif force_upload or not available_remotely:
                 self.started_upload()
                 raise_task_error_if(
                     not self.upload(force=force_upload, persistent_only=True) \
-                    and cache.upload_enabled(),
+                    and self.cache.upload_enabled(),
                     self, "Failed to upload task artifact")
                 self.finished_upload()
 
@@ -1075,12 +1104,11 @@ class Graph(object):
 
 
 class GraphBuilder(object):
-    def __init__(self, registry, cache,
+    def __init__(self, registry, cache, options=None, progress=False, buildenv=None):
         self.cache = cache
         self.graph = Graph()
         self.nodes = {}
         self.registry = registry
-        self.manifest = manifest
         self.buildenv = buildenv
         self.progress = progress
         self.options = options or JoltOptions()
@@ -1089,7 +1117,7 @@ class GraphBuilder(object):
         name = utils.stable_task_name(name)
         node = self.nodes.get(name)
         if not node:
-            task = self.registry.get_task(name,
+            task = self.registry.get_task(name, buildenv=self.buildenv)
             node = self.nodes.get(task.qualified_name, None)
             if node is not None:
                 return node
@@ -1121,7 +1149,7 @@
         parent = node
 
         for requirement in node.task.requires:
-            alias,
+            alias, _, task, name = utils.parse_aliased_task_name(requirement)
             child = self._get_node(progress, utils.format_task_name(task, name), parent=node)
 
             # Create direct edges from alias parents to alias children
@@ -1156,7 +1184,7 @@
         topological_nodes = self.graph.topological_nodes
         with self._progress("Collecting task influence", len(self.graph.tasks), "tasks") as p:
             for node in reversed(topological_nodes):
-                node.finalize(self.graph
+                node.finalize(self.graph)
                 p.update(1)
 
         # Create artifacts in forward order so that parent identities are available
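The docstring added to TaskProxy.download above spells out the policy: a failed session artifact download only produces a warning, while persistent artifacts decide the return value. A self-contained sketch of that split, with a fake artifact type and a fetch callback standing in for the artifact cache; the names are illustrative, not Jolt's API:

class FakeArtifact:
    def __init__(self, name, session, ok=True):
        self.name, self.session, self.ok = name, session, ok

    def is_session(self):
        return self.session

def download(artifacts, fetch, session_only=False, persistent_only=False):
    # Session artifacts are best effort; persistent artifacts decide success.
    success = True
    session = [a for a in artifacts if a.is_session()]
    persistent = [a for a in artifacts if not a.is_session()]
    download_all = not session_only and not persistent_only
    if session_only or download_all:
        for artifact in session:
            if not fetch(artifact):
                print("Failed to download session artifact:", artifact.name)
    if persistent_only or download_all:
        success = all(fetch(a) for a in persistent)
    return success

artifacts = [FakeArtifact("buildlog", session=True, ok=False),
             FakeArtifact("binaries", session=False)]
print(download(artifacts, fetch=lambda a: a.ok))  # True: the session failure is only a warning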
|