jolt 0.9.172__py3-none-any.whl → 0.9.435__py3-none-any.whl
This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- jolt/__init__.py +80 -7
- jolt/__main__.py +9 -1
- jolt/bin/fstree-darwin-x86_64 +0 -0
- jolt/bin/fstree-linux-x86_64 +0 -0
- jolt/cache.py +596 -252
- jolt/chroot.py +36 -11
- jolt/cli.py +143 -130
- jolt/common_pb2.py +45 -45
- jolt/config.py +76 -40
- jolt/error.py +19 -4
- jolt/filesystem.py +2 -6
- jolt/graph.py +400 -82
- jolt/influence.py +110 -3
- jolt/loader.py +338 -174
- jolt/log.py +127 -31
- jolt/manifest.py +13 -46
- jolt/options.py +35 -11
- jolt/pkgs/abseil.py +42 -0
- jolt/pkgs/asio.py +25 -0
- jolt/pkgs/autoconf.py +41 -0
- jolt/pkgs/automake.py +41 -0
- jolt/pkgs/b2.py +31 -0
- jolt/pkgs/boost.py +111 -0
- jolt/pkgs/boringssl.py +32 -0
- jolt/pkgs/busybox.py +39 -0
- jolt/pkgs/bzip2.py +43 -0
- jolt/pkgs/cares.py +29 -0
- jolt/pkgs/catch2.py +36 -0
- jolt/pkgs/cbindgen.py +17 -0
- jolt/pkgs/cista.py +19 -0
- jolt/pkgs/clang.py +44 -0
- jolt/pkgs/cli11.py +24 -0
- jolt/pkgs/cmake.py +48 -0
- jolt/pkgs/cpython.py +196 -0
- jolt/pkgs/crun.py +29 -0
- jolt/pkgs/curl.py +38 -0
- jolt/pkgs/dbus.py +18 -0
- jolt/pkgs/double_conversion.py +24 -0
- jolt/pkgs/fastfloat.py +21 -0
- jolt/pkgs/ffmpeg.py +28 -0
- jolt/pkgs/flatbuffers.py +29 -0
- jolt/pkgs/fmt.py +27 -0
- jolt/pkgs/fstree.py +20 -0
- jolt/pkgs/gflags.py +18 -0
- jolt/pkgs/glib.py +18 -0
- jolt/pkgs/glog.py +25 -0
- jolt/pkgs/glslang.py +21 -0
- jolt/pkgs/golang.py +16 -11
- jolt/pkgs/googlebenchmark.py +18 -0
- jolt/pkgs/googletest.py +46 -0
- jolt/pkgs/gperf.py +15 -0
- jolt/pkgs/grpc.py +73 -0
- jolt/pkgs/hdf5.py +19 -0
- jolt/pkgs/help2man.py +14 -0
- jolt/pkgs/inja.py +28 -0
- jolt/pkgs/jsoncpp.py +31 -0
- jolt/pkgs/libarchive.py +43 -0
- jolt/pkgs/libcap.py +44 -0
- jolt/pkgs/libdrm.py +44 -0
- jolt/pkgs/libedit.py +42 -0
- jolt/pkgs/libevent.py +31 -0
- jolt/pkgs/libexpat.py +27 -0
- jolt/pkgs/libfastjson.py +21 -0
- jolt/pkgs/libffi.py +16 -0
- jolt/pkgs/libglvnd.py +30 -0
- jolt/pkgs/libogg.py +28 -0
- jolt/pkgs/libpciaccess.py +18 -0
- jolt/pkgs/libseccomp.py +21 -0
- jolt/pkgs/libtirpc.py +24 -0
- jolt/pkgs/libtool.py +42 -0
- jolt/pkgs/libunwind.py +35 -0
- jolt/pkgs/libva.py +18 -0
- jolt/pkgs/libvorbis.py +33 -0
- jolt/pkgs/libxml2.py +35 -0
- jolt/pkgs/libxslt.py +17 -0
- jolt/pkgs/libyajl.py +16 -0
- jolt/pkgs/llvm.py +81 -0
- jolt/pkgs/lua.py +54 -0
- jolt/pkgs/lz4.py +26 -0
- jolt/pkgs/m4.py +14 -0
- jolt/pkgs/make.py +17 -0
- jolt/pkgs/mesa.py +81 -0
- jolt/pkgs/meson.py +17 -0
- jolt/pkgs/mstch.py +28 -0
- jolt/pkgs/mysql.py +60 -0
- jolt/pkgs/nasm.py +49 -0
- jolt/pkgs/ncurses.py +30 -0
- jolt/pkgs/ng_log.py +25 -0
- jolt/pkgs/ninja.py +45 -0
- jolt/pkgs/nlohmann_json.py +25 -0
- jolt/pkgs/nodejs.py +19 -11
- jolt/pkgs/opencv.py +24 -0
- jolt/pkgs/openjdk.py +26 -0
- jolt/pkgs/openssl.py +103 -0
- jolt/pkgs/paho.py +76 -0
- jolt/pkgs/patchelf.py +16 -0
- jolt/pkgs/perl.py +42 -0
- jolt/pkgs/pkgconfig.py +64 -0
- jolt/pkgs/poco.py +39 -0
- jolt/pkgs/protobuf.py +77 -0
- jolt/pkgs/pugixml.py +27 -0
- jolt/pkgs/python.py +19 -0
- jolt/pkgs/qt.py +35 -0
- jolt/pkgs/rapidjson.py +26 -0
- jolt/pkgs/rapidyaml.py +28 -0
- jolt/pkgs/re2.py +30 -0
- jolt/pkgs/re2c.py +17 -0
- jolt/pkgs/readline.py +15 -0
- jolt/pkgs/rust.py +41 -0
- jolt/pkgs/sdl.py +28 -0
- jolt/pkgs/simdjson.py +27 -0
- jolt/pkgs/soci.py +46 -0
- jolt/pkgs/spdlog.py +29 -0
- jolt/pkgs/spirv_llvm.py +21 -0
- jolt/pkgs/spirv_tools.py +24 -0
- jolt/pkgs/sqlite.py +83 -0
- jolt/pkgs/ssl.py +12 -0
- jolt/pkgs/texinfo.py +15 -0
- jolt/pkgs/tomlplusplus.py +22 -0
- jolt/pkgs/wayland.py +26 -0
- jolt/pkgs/x11.py +58 -0
- jolt/pkgs/xerces_c.py +20 -0
- jolt/pkgs/xorg.py +360 -0
- jolt/pkgs/xz.py +29 -0
- jolt/pkgs/yamlcpp.py +30 -0
- jolt/pkgs/zeromq.py +47 -0
- jolt/pkgs/zlib.py +87 -0
- jolt/pkgs/zstd.py +33 -0
- jolt/plugins/alias.py +3 -0
- jolt/plugins/allure.py +2 -2
- jolt/plugins/autotools.py +66 -0
- jolt/plugins/cache.py +1 -1
- jolt/plugins/cmake.py +74 -6
- jolt/plugins/conan.py +238 -0
- jolt/plugins/cxxinfo.py +7 -0
- jolt/plugins/docker.py +76 -19
- jolt/plugins/email.xslt +141 -118
- jolt/plugins/environ.py +11 -0
- jolt/plugins/fetch.py +141 -0
- jolt/plugins/gdb.py +33 -14
- jolt/plugins/gerrit.py +0 -13
- jolt/plugins/git.py +248 -66
- jolt/plugins/googletest.py +1 -1
- jolt/plugins/http.py +1 -1
- jolt/plugins/libtool.py +63 -0
- jolt/plugins/linux.py +990 -0
- jolt/plugins/logstash.py +4 -4
- jolt/plugins/meson.py +61 -0
- jolt/plugins/ninja-compdb.py +96 -28
- jolt/plugins/ninja.py +424 -150
- jolt/plugins/paths.py +11 -1
- jolt/plugins/pkgconfig.py +219 -0
- jolt/plugins/podman.py +131 -87
- jolt/plugins/python.py +137 -0
- jolt/plugins/remote_execution/administration_pb2.py +27 -19
- jolt/plugins/remote_execution/log_pb2.py +12 -12
- jolt/plugins/remote_execution/scheduler_pb2.py +23 -23
- jolt/plugins/remote_execution/worker_pb2.py +19 -19
- jolt/plugins/report.py +7 -2
- jolt/plugins/rust.py +25 -0
- jolt/plugins/scheduler.py +135 -86
- jolt/plugins/selfdeploy/setup.py +6 -6
- jolt/plugins/selfdeploy.py +49 -31
- jolt/plugins/strings.py +35 -22
- jolt/plugins/symlinks.py +11 -4
- jolt/plugins/telemetry.py +1 -2
- jolt/plugins/timeline.py +13 -3
- jolt/scheduler.py +467 -165
- jolt/tasks.py +427 -111
- jolt/templates/timeline.html.template +44 -47
- jolt/timer.py +22 -0
- jolt/tools.py +527 -188
- jolt/utils.py +183 -3
- jolt/version.py +1 -1
- jolt/xmldom.py +12 -2
- {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/METADATA +97 -41
- jolt-0.9.435.dist-info/RECORD +207 -0
- {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/WHEEL +1 -1
- jolt/plugins/amqp.py +0 -855
- jolt/plugins/debian.py +0 -338
- jolt/plugins/repo.py +0 -253
- jolt/plugins/snap.py +0 -122
- jolt-0.9.172.dist-info/RECORD +0 -92
- {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/entry_points.txt +0 -0
- {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/top_level.txt +0 -0
jolt/graph.py
CHANGED
@@ -1,12 +1,16 @@
-from contextlib import contextmanager, ExitStack
+from contextlib import contextmanager, ExitStack, nullcontext
 import copy
 import hashlib
 from os import getenv
 from threading import RLock
 from collections import OrderedDict
 import uuid
+import socket
+import sys
 
+from jolt import cli
 from jolt import common_pb2 as common_pb
+from jolt import config
 from jolt import log
 from jolt import utils
 from jolt import colors
@@ -16,7 +20,7 @@ from jolt.error import raise_error_if
 from jolt.error import raise_task_error_if
 from jolt.influence import HashInfluenceRegistry, TaskRequirementInfluence
 from jolt.options import JoltOptions
-from jolt.tasks import Alias, Resource, WorkspaceResource
+from jolt.tasks import Alias, Resource, WorkspaceResource
 
 
 class TaskProxy(object):
@@ -56,8 +60,13 @@ class TaskProxy(object):
         self._network = False
         self._artifacts = []
         self._status = None
+        self._finalized = False
 
-
+        # Consumer task if this is a resource
+        self._owner = None
+
+        # List of all artifacts that are produced by this task
+        self._artifacts = []
 
     def __hash__(self):
         return id(self)
@@ -66,6 +75,15 @@ class TaskProxy(object):
     def artifacts(self):
         return self._artifacts
 
+    def add_artifact(self, artifact):
+        self._artifacts.append(artifact)
+
+    def get_artifact(self, name):
+        for artifact in self.artifacts:
+            if artifact.name == name:
+                return artifact
+        return None
+
     @property
     def tools(self):
         return self.task.tools
@@ -86,12 +104,33 @@ class TaskProxy(object):
     def short_qualified_name(self):
         return self.task.short_qualified_name
 
+    @property
+    def exported_name(self):
+        return self.task.exported_name
+
     @property
     def log_name(self):
         return "({0} {1})".format(self.short_qualified_name, self.identity[:8])
 
+    def log_running_time(self):
+        """ Emits an information log line every 5 mins a task has been running. """
+        if not self.is_running():
+            return
+        minutes = int(self.duration_running.seconds / 60)
+        if (minutes % 5) == 0 and minutes > 0:
+            if minutes >= 60:
+                fmt = f"{int(minutes / 60)}h {int(minutes % 60)}min"
+            else:
+                fmt = f"{int(minutes % 60)}min"
+            if self.is_remotely_executed():
+                self.info("Remote execution still in progress after {}", fmt)
+            else:
+                self.info("Execution still in progress after {}", fmt)
+
     @property
     def identity(self):
+        raise_task_error_if(not self._finalized, self, "Task identity read prematurely")
+
         if self.task.identity is not None:
             return self.task.identity
 
@@ -112,7 +151,11 @@ class TaskProxy(object):
 
     @property
     def instance(self):
-        return self.task.
+        return self.task.instance
+
+    @instance.setter
+    def instance(self, value):
+        self.task.instance = value
 
     @property
     def is_unstable(self):
@@ -132,6 +175,9 @@ class TaskProxy(object):
     def info(self, fmt, *args, **kwargs):
         self.task.info(fmt + " " + self.log_name, *args, **kwargs)
 
+    def debug(self, fmt, *args, **kwargs):
+        log.debug(fmt + " " + self.log_name, *args, **kwargs)
+
     def verbose(self, fmt, *args, **kwargs):
         log.verbose(fmt + " " + self.log_name, *args, **kwargs)
 
@@ -148,7 +194,7 @@ class TaskProxy(object):
         return len(self.ancestors) > 0
 
     def has_artifact(self):
-        return self.is_cacheable() and not self.
+        return self.is_cacheable() and not self.is_alias()
 
     def has_extensions(self):
         return len(self.extensions) > 0
@@ -170,19 +216,21 @@ class TaskProxy(object):
         for c in self.children:
             if c.is_resource() or c.is_alias():
                 continue
-            if not c.is_available_locally():
+            if not c.is_available_locally(persistent_only=True):
                 return False
         return True
 
     def is_alias(self):
         return isinstance(self.task, Alias)
 
-    def is_available_locally(self, extensions=True,
+    def is_available_locally(self, extensions=True, persistent_only=True):
         dep_artifacts = []
         if extensions:
             for dep in self.extensions:
                 dep_artifacts += dep.artifacts
-        artifacts =
+        artifacts = self._artifacts + dep_artifacts
+        if persistent_only:
+            artifacts = filter(lambda a: not a.is_session(), artifacts)
         return all(map(self.cache.is_available_locally, artifacts))
 
     def is_available_remotely(self, extensions=True, cache=True):
@@ -215,6 +263,12 @@ class TaskProxy(object):
     def is_goal(self, with_extensions=True):
         return self._goal or (with_extensions and any([e.is_goal() for e in self.extensions]))
 
+    def is_local(self):
+        if self.is_extension():
+            return self.get_extended_task().is_local()
+        tasks = [self.task] + [e.task for e in self.extensions]
+        return any([task.local for task in tasks])
+
     def in_progress(self):
         return self._in_progress
 
@@ -236,8 +290,18 @@ class TaskProxy(object):
     def is_resource(self):
         return isinstance(self.task, Resource)
 
+    def is_workspace_resource(self):
+        return isinstance(self.task, WorkspaceResource)
+
+    def is_running(self):
+        return self.status() == common_pb.TaskStatus.TASK_RUNNING
+
     def is_unpackable(self):
-
+        tasks = [self] + self.extensions
+        artifacts = []
+        for task in tasks:
+            artifacts.extend(task._artifacts)
+        return any(map(lambda artifact: artifact.is_unpackable(), artifacts))
 
     def is_unpacked(self):
         tasks = [self] + self.extensions
@@ -254,17 +318,14 @@ class TaskProxy(object):
         artifacts.extend(task._artifacts)
         return all(map(lambda artifact: artifact.is_uploadable(), artifacts))
 
-    def is_workspace_resource(self):
-        return isinstance(self.task, WorkspaceResource)
-
     @contextmanager
     def lock_artifacts(self, discard=False):
         artifacts = []
         stack = ExitStack()
-        for artifact in self.artifacts:
-            lock = self.cache.lock_artifact(artifact, discard=discard)
-            artifacts.append(stack.enter_context(lock))
         try:
+            for artifact in self.artifacts:
+                lock = self.cache.lock_artifact(artifact, discard=discard)
+                artifacts.append(stack.enter_context(lock))
             self._artifacts = artifacts
             yield artifacts
         finally:
@@ -274,23 +335,39 @@ class TaskProxy(object):
             self._download = False
 
     def download(self, force=False, session_only=False, persistent_only=False):
-
-
-
-
-
-
-        artifacts = list(filter(lambda a: not a.is_session(), self._artifacts))
-        if not artifacts:
-            return True
-        return all([self.cache.download(artifact, force=force) for artifact in artifacts])
+        """
+        Downloads all artifacts of this task.
+
+        If the task is not downloadable, the method returns True. Failure to
+        download persistent artifacts is considered a failure, and the method
+        returns False. Session artifacts are not required to be downloaded.
 
-
+        :param force: Force download even if the artifacts are already available.
+        :param session_only: Download only session artifacts.
+        :param persistent_only: Download only persistent artifacts.
+
+        """
+        if not force and not self.is_downloadable():
+            return True
+        success = True
         artifacts = self._artifacts
+        artifacts_session = list(filter(lambda a: a.is_session(), artifacts))
+        artifacts_persistent = list(filter(lambda a: not a.is_session(), artifacts))
+        download_all = not session_only and not persistent_only
+        if session_only or download_all:
+            for artifact in artifacts_session:
+                if not self.cache.download(artifact, force=force):
+                    self.warning("Failed to download session artifact: {}", artifact.identity)
+        if persistent_only or download_all:
+            success = all([self.cache.download(artifact, force=force) for artifact in artifacts_persistent])
+        return success
+
+    def upload(self, force=False, locked=False, session_only=False, persistent_only=False, artifacts=None):
+        artifacts = artifacts or self._artifacts
         if session_only:
-            artifacts = list(filter(lambda a: a.is_session(),
+            artifacts = list(filter(lambda a: a.is_session(), artifacts))
         if persistent_only:
-            artifacts = list(filter(lambda a: not a.is_session(),
+            artifacts = list(filter(lambda a: not a.is_session(), artifacts))
         if not artifacts:
             return True
         if not self.is_uploadable(artifacts):
@@ -300,6 +377,9 @@ class TaskProxy(object):
     def resolve_requirement_alias(self, name):
         return self.requirement_aliases.get(name)
 
+    def set_cancelled(self):
+        self.set_status(common_pb.TaskStatus.TASK_CANCELLED)
+
     def set_passed(self):
         self.set_status(common_pb.TaskStatus.TASK_PASSED)
 
@@ -340,9 +420,12 @@ class TaskProxy(object):
     def set_goal(self):
         self._goal = True
 
-    def
+    def set_owner(self, owner):
+        self._owner = owner
+        self.task.exported_name = f"{self.short_qualified_name}@@{owner.short_qualified_name}"
+
+    def finalize(self, dag):
         log.debug("Finalizing: " + self.short_qualified_name)
-        self.manifest = manifest
 
         # Find all direct and transitive dependencies
         self.ancestors = set()
@@ -358,25 +441,49 @@ class TaskProxy(object):
             self.children.extend(n.children)
             n.ancestors.add(self)
 
-        # Exclude transitive alias and resources dependencies
+        # Exclude transitive alias and resources dependencies.
+        # Workspace resources are included as they may be required by its dependencies.
         self.children = list(
-            filter(lambda n: not n.is_alias() and
+            filter(lambda n: dag.are_neighbors(self, n) or (not n.is_alias() and not n.is_resource()),
                   utils.unique_list(self.children)))
 
+        # Prepare workspace resources for this task so that influence can be calculated
+        for child in self.children:
+            if not child.is_workspace_resource():
+                continue
+            child.task.prepare_ws_for(self.task)
+
         self.descendants = list(self.descendants)
 
         self.task.influence += [TaskRequirementInfluence(n) for n in self.neighbors]
+        self._finalized = True
         self.identity
-
+
+        hooks.task_created(self)
 
         return self.identity
 
+    def finalize_artifacts(self):
+        self._artifacts.extend(self.task._artifacts(self.cache, self))
+
     def taint(self, salt=None):
-        self.task.taint =
+        self.task.taint = salt or uuid.uuid4()
        if salt is None:
             # Only recalculate identity when build is forced, not when salted
             self.identity = None
         self.identity
+        # Recreate artifacts
+        self._artifacts = []
+        self.finalize_artifacts()
+
+        # If this is an alias, taint all children
+        if self.is_alias():
+            for child in self.children:
+                child.taint()
+
+        # Taint all extensions
+        for extension in self.extensions:
+            extension.taint()
 
     def queued(self, remote=True):
         self.task.verbose("Task queued " + self.log_name)
@@ -392,9 +499,15 @@ class TaskProxy(object):
         self.duration_running = utils.duration() if not when else when
 
     def running_execution(self, remote=False):
+        self.worker = socket.gethostname()
         hooks.task_started_execution(self)
         self.running(what="Remote execution" if remote else "Execution")
 
+    def restarted_execution(self, remote=False):
+        hooks.task_finished_execution(self)
+        self.task.warning("Remote execution interrupted {}" if remote else "Execution interrupted {}", self.log_name)
+        self.queued()
+
     def started_execution(self, remote=False):
         self.queued()
         self.running_execution(remote=remote)
@@ -409,16 +522,24 @@ class TaskProxy(object):
         self.running(what="Upload")
         hooks.task_started_upload(self)
 
-    def _failed(self, what="Execution"):
-
+    def _failed(self, what="Execution", interrupt=False):
+        if interrupt:
+            how = "interrupted"
+            logfn = self.warning
+            self.set_cancelled()
+        else:
+            how = "failed"
+            logfn = self.error
+            self.set_failed()
+
         if self.duration_queued and self.duration_running:
-
-
-
+            logfn("{0} {1} after {2} {3}", what, how,
+                  self.duration_running,
+                  self.duration_queued.diff(self.duration_running))
         elif self.duration_queued:
-
+            logfn("{0} {1} after {2}", what, how, self.duration_queued or utils.duration())
         else:
-
+            logfn("{0} {1} immediately", what, how)
 
         if self.is_unstable:
             try:
@@ -437,8 +558,8 @@ class TaskProxy(object):
     def failed_upload(self):
         self._failed("Upload")
 
-    def failed_execution(self, remote=False):
-        self._failed(what="Remote execution" if remote else "Execution")
+    def failed_execution(self, remote=False, interrupt=False):
+        self._failed(what="Remote execution" if remote else "Execution", interrupt=interrupt)
 
     def _finished(self, what="Execution"):
         raise_task_error_if(
@@ -450,7 +571,7 @@ class TaskProxy(object):
         except KeyError:
             self.warning("Pruned task was executed")
         self.task.info(colors.green(what + " finished after {0} {1}" + self.log_name),
-                       self.duration_running,
+                       self.duration_running or "00s",
                       self.duration_queued.diff(self.duration_running))
         hooks.task_finished(self)
 
@@ -497,61 +618,220 @@ class TaskProxy(object):
         else:
             log.debug(" Retained: {} ({})", self.short_qualified_name, artifact.identity)
 
-    def _run_download_dependencies(self,
+    def _run_download_dependencies(self, resource_only=False):
         for child in self.children:
             if not child.has_artifact():
                 continue
+
+            if child.is_resource() and child.is_local():
+                if child.options.worker:
+                    # Resource already acquired by the client when running as worker
+                    raise_task_error_if(
+                        not child.download(force=True),
+                        child, "Failed to download task artifact")
+                else:
+                    # Resource about to be acquired by the client
+                    child._run_download_dependencies()
+                continue
+
+            if resource_only and not child.is_resource():
+                continue
+
             raise_task_error_if(
                 not child.is_completed() and child.is_unstable,
                 self, "Task depends on failed task '{}'", child.short_qualified_name)
             if not child.is_available_locally(extensions=False):
                 raise_task_error_if(
-                    not child.download(
+                    not child.download(force=True),
                     child, "Failed to download task artifact")
 
-    def
-
+    def partially_available_locally(self):
+        availability = map(lambda a: self.cache.is_available_locally(a), self.artifacts)
+        return any(availability) and not all(availability)
 
-
-
-
+    def _validate_platform(self):
+        """ Validates that the task is runnable on the current platform. """
+        platform_os, platform_arch = utils.platform_os_arch()
 
-
-
-
+        os = self.task.platform.get("node.os")
+        if os:
+            os = self.tools.expand(os)
+            raise_task_error_if(
+                os != platform_os,
+                self, f"Task is not runnable on current platform (wants node.os={os})")
+
+        arch = self.task.platform.get("node.arch")
+        if arch:
+            arch = self.tools.expand(arch)
+            raise_task_error_if(
+                arch != platform_arch,
+                self, f"Task is not runnable on current platform (wants node.arch={arch})")
 
-
-
+    def run_acquire(self, artifact, owner, log_prefix=False):
+        """
+        Acquires a resource and publishes its artifact.
 
-
-
+        The artifact is published to the cache even if the acquisition fails.
+        """
+
+        try:
+            if not self.is_workspace_resource():
+                ts = utils.duration()
+                log.info(colors.blue("Resource acquisition started ({} for {})"), self.short_qualified_name, owner.short_qualified_name)
+
+            try:
+                with log.thread_prefix(owner.identity[:8]) if log_prefix else nullcontext():
+                    acquire = getattr(self.task, "acquire_" + artifact.name) if artifact.name != "main" else self.task.acquire
+                    acquire(artifact, self.deps, self.tools, owner.task)
+            finally:
+                # Always commit the resource session artifact to the cache, even if the acquisition failed.
+                if not self.is_workspace_resource():
+                    self.cache.commit(artifact)
+
+            if not self.is_workspace_resource():
+                log.info(colors.green("Resource acquisition finished after {} ({} for {})"), ts, self.short_qualified_name, owner.short_qualified_name)
+
+        except (KeyboardInterrupt, Exception) as e:
+            if not self.is_workspace_resource():
+                log.error(colors.red("Resource acquisition failed after {} ({} for {})"), ts, self.short_qualified_name, owner.short_qualified_name)
+            if self.task.release_on_error:
+                with utils.ignore_exception():
+                    self.run_release(artifact, owner)
+            raise e
+
+    def run_release(self, artifact, owner, log_prefix=False):
+        """
+        Releases a resource.
+        """
+        try:
+            if not self.is_workspace_resource():
+                ts = utils.duration()
+                log.info(colors.blue("Resource release started ({} for {})"), self.short_qualified_name, owner.short_qualified_name)
+
+            with log.thread_prefix(owner.identity[:8]) if log_prefix else nullcontext():
+                release = getattr(self.task, "release_" + artifact.name) if artifact.name != "main" else self.task.release
+                release(artifact, self.deps, self.tools, owner.task)
+
+            if not self.is_workspace_resource():
+                log.info(colors.green("Resource release finished after {} ({} for {})"), ts, self.short_qualified_name, owner.short_qualified_name)
+
+        except (KeyboardInterrupt, Exception) as e:
+            if not self.is_workspace_resource():
+                log.error(colors.red("Resource release failed after {} ({} for {})"), ts, self.short_qualified_name, owner.short_qualified_name)
+            raise e
+
+    @contextmanager
+    def run_resources(self):
+        """
+        Acquires and releases resources for the task.
+
+        The method is called by executors before invoking the task proxy's run() method.
+        Resource dependencies are acquired and released in reverse order. If an acquisition fails,
+        already acquired resources are released in reverse order and the exception is propagated
+        to the caller.
+
+        Resource artifacts are always published and uploaded if the acquisition has been started,
+        even if the acquisition fails. That way, a failed acquisition can be debugged.
+        """
+        self._run_download_dependencies(resource_only=True)
+
+        with self._run_resources_no_dep_download():
+            yield
+
+    @contextmanager
+    def _run_resources_no_dep_download(self):
+        # Log messages are prefixed with task identity if resources are acquired in parallel
+        log_prefix = False
+
+        # Collect list of resource dependencies
+        resource_deps = [child for child in self.children if child.is_resource()]
+
+        if self.options.worker:
+            # Exclude local resources when running as worker. They are already acquired by the client.
+            resource_deps = [child for child in resource_deps if not child.is_local()]
+        elif self.options.network and not self.is_local():
+            # Exclude non-local resources in the client when running a network build.
+            # They are acquired by the remote worker.
+            resource_deps = [child for child in resource_deps if child.is_local()]
+            log_prefix = True
+
+        exitstack = ExitStack()
+        acquired = []
+        try:
+            # Acquire resource dependencies in reverse order.
+            for resource in reversed(resource_deps):
+                # Always discard resource artifacts before acquiring the resource.
+                # They should not exist in the cache when the resource is acquired,
+                # but may exist if the resource was previously acquired by an interrupted build.
+                with resource.lock_artifacts(discard=True) if not resource.is_workspace_resource() else nullcontext():
+                    resource.deps = self.cache.get_context(resource)
+                    exitstack.enter_context(resource.deps)
+
+                # Just like tasks, a resource may have multiple artifacts. Run acquire for each artifact.
+                for artifact in resource.artifacts:
+                    try:
+                        resource.run_acquire(artifact, self, log_prefix=log_prefix)
+                        acquired.append(resource)
+                    finally:
+                        # Always upload the artifact session artifact to the cache, even if the acquisition failed.
+                        if not resource.is_workspace_resource():
+                            resource.upload(locked=False, session_only=True, artifacts=[artifact])
+
+            yield
+
+        finally:
+            for resource in reversed(acquired):
+                for artifact in resource.artifacts:
+                    resource.run_release(artifact, self, log_prefix=log_prefix)
+            exitstack.close()
+
+    def run(self, env, force_upload=False, force_build=False):
+        # Download dependency artifacts if not already done
+        self._run_download_dependencies()
+
+        with self._run_resources_no_dep_download():
+            self._run_task(env, force_upload, force_build)
+
+    def _run_task(self, env, force_upload=False, force_build=False):
+        queue = env.queue
+
+        with self.tools:
+            available_locally = available_remotely = False
 
             # Check if task artifact is available locally or remotely,
             # either skip execution or download it if necessary.
             if not force_build:
                 available_locally = self.is_available_locally()
                 if available_locally and not force_upload:
+                    self.skipped()
                     return
-
+
+                available_remotely = self.cache.download_enabled() and self.is_available_remotely()
                 if not available_locally and available_remotely:
                     available_locally = self.download()
 
+            if not available_locally and self.partially_available_locally():
+                force_build = True
+
             if force_build or not available_locally:
                 with log.threadsink() as buildlog:
                     if self.task.is_runnable():
                         log.verbose("Host: {0}", getenv("HOSTNAME", "localhost"))
 
-                    with self.lock_artifacts(discard=
+                    with self.lock_artifacts(discard=not self.is_resource()) as artifacts:
                         exitstack = ExitStack()
 
                         # Indicates whether session artifacts have been published
                         upload_session_artifacts = False
 
                        try:
-                            context = cache.get_context(self)
+                            context = self.cache.get_context(self)
                             exitstack.enter_context(context)
 
                             self.running_execution()
+
+                            self._validate_platform()
+
                             with self.tools.cwd(self.task.joltdir):
                                 if self.is_goal() and self.options.debug:
                                     log.info("Entering debug shell")
@@ -561,7 +841,8 @@ class TaskProxy(object):
                                 # Run task
                                 try:
                                     hooks.task_prerun(self, context, self.tools)
-                                    self.
+                                    with self.tools.timeout(seconds=config.getint("jolt", "task_timeout")):
+                                        self.task.run(context, self.tools)
                                 finally:
                                     hooks.task_postrun(self, context, self.tools)
 
@@ -574,16 +855,34 @@ class TaskProxy(object):
 
                            finally:
                                 # Session artifacts should be uploaded
-                                upload_session_artifacts =
+                                upload_session_artifacts = []
 
                                 # Publish session artifacts to local cache
                                 for artifact in filter(lambda a: a.is_session(), artifacts):
                                     self.publish(context, artifact)
+                                    upload_session_artifacts.append(artifact)
+
+                        except KeyboardInterrupt as e:
+                            self.failed_execution(interrupt=True)
+                            with utils.ignore_exception():
+                                exitstack.close()
+
+                            raise e
 
                         except Exception as e:
-                            self.failed_execution()
+                            self.failed_execution(interrupt=queue.is_aborted() if queue else False)
+
                             with utils.ignore_exception():
                                 exitstack.close()
+
+                            if queue is not None and queue.is_aborted():
+                                raise KeyboardInterrupt()
+
+                            if cli.debug_enabled:
+                                import pdb
+                                extype, value, tb = sys.exc_info()
+                                pdb.post_mortem(tb)
+
                             raise e
 
                         else:
@@ -595,22 +894,22 @@ class TaskProxy(object):
                            if force_upload or force_build or not available_remotely:
                                 raise_task_error_if(
                                     not self.upload(force=force_upload, locked=False, persistent_only=True) \
-                                    and cache.upload_enabled(),
+                                    and self.cache.upload_enabled(),
                                     self, "Failed to upload task artifact")
 
                         finally:
-                            # Upload session artifacts to remote cache
+                            # Upload published session artifacts to remote cache
                             raise_task_error_if(
                                 upload_session_artifacts \
-                                and not self.upload(force=force_upload, locked=False, session_only=True) \
-                                and cache.upload_enabled(),
+                                and not self.upload(force=force_upload, locked=False, session_only=True, artifacts=upload_session_artifacts) \
+                                and self.cache.upload_enabled(),
                                 self, "Failed to upload session artifact")
 
             elif force_upload or not available_remotely:
                 self.started_upload()
                 raise_task_error_if(
                     not self.upload(force=force_upload, persistent_only=True) \
-                    and cache.upload_enabled(),
+                    and self.cache.upload_enabled(),
                     self, "Failed to upload task artifact")
                 self.finished_upload()
 
@@ -620,7 +919,7 @@ class TaskProxy(object):
 
         for extension in self.extensions:
             with hooks.task_run(extension):
-                extension.run(
+                extension.run(env, force_upload, force_build)
 
     def publish(self, context, artifact, buildlog=None):
         hooks.task_prepublish(self, artifact, self.tools)
@@ -634,9 +933,18 @@ class TaskProxy(object):
         hooks.task_postpublish(self, artifact, self.tools)
         artifact.get_cache().commit(artifact)
 
+    def raise_for_status(self, log_details=False, log_error=False):
+        with self.report() as report:
+            report.raise_for_status(log_details, log_error)
+
     def report(self):
         return self.task.report()
 
+    def unpack(self):
+        """ Unpacks all artifacts produced by this task. """
+        for artifact in self.artifacts:
+            self.cache.unpack(artifact)
+
 
 class Graph(object):
     def __init__(self):
@@ -746,7 +1054,7 @@ class Graph(object):
 
     @property
     def pruned(self):
-        return self._pruned
+        return [p for p in self._pruned]
 
     @property
     def roots(self):
@@ -776,7 +1084,7 @@ class Graph(object):
         with self._mutex:
             log.debug("[GRAPH] Listing all nodes")
             for node in self.topological_nodes:
-                log.debug("[GRAPH] " + node.
+                log.debug("[GRAPH] " + node.short_qualified_name + " ({})", len(self._children[node].keys()))
 
     def is_leaf(self, node):
         with self._mutex:
@@ -796,27 +1104,31 @@
 
 
 class GraphBuilder(object):
-    def __init__(self, registry, cache,
+    def __init__(self, registry, cache, options=None, progress=False, buildenv=None):
         self.cache = cache
         self.graph = Graph()
         self.nodes = {}
         self.registry = registry
-        self.manifest = manifest
         self.buildenv = buildenv
         self.progress = progress
         self.options = options or JoltOptions()
 
-    def _get_node(self, progress, name):
+    def _get_node(self, progress, name, parent=None):
         name = utils.stable_task_name(name)
         node = self.nodes.get(name)
         if not node:
-            task = self.registry.get_task(name,
+            task = self.registry.get_task(name, buildenv=self.buildenv)
             node = self.nodes.get(task.qualified_name, None)
             if node is not None:
                 return node
             node = TaskProxy(task, self.graph, self.cache, self.options)
-
-
+            if not node.is_resource() or node.is_workspace_resource():
+                self.nodes[node.short_qualified_name] = node
+                self.nodes[node.qualified_name] = node
+            elif parent:
+                node.set_owner(parent)
+            if self.buildenv:
+                task._apply_protobuf(self.buildenv)
             if self.options.salt:
                 node.taint(self.options.salt)
             self._build_node(progress, node)
@@ -827,7 +1139,7 @@ class GraphBuilder(object):
         self.graph.add_node(node)
 
         if node.task.extends:
-            extended_node = self._get_node(progress, node.task.extends)
+            extended_node = self._get_node(progress, node.task.extends, parent=node)
             self.graph.add_edges_from([(node, extended_node)])
             node.set_extended_task(extended_node)
             extended_node.add_extension(node)
@@ -837,8 +1149,9 @@ class GraphBuilder(object):
         parent = node
 
         for requirement in node.task.requires:
-            alias,
-            child = self._get_node(progress, utils.format_task_name(task, name))
+            alias, _, task, name = utils.parse_aliased_task_name(requirement)
+            child = self._get_node(progress, utils.format_task_name(task, name), parent=node)
+
             # Create direct edges from alias parents to alias children
             if child.is_alias():
                 for child_child in child.children:
@@ -871,9 +1184,14 @@ class GraphBuilder(object):
         topological_nodes = self.graph.topological_nodes
         with self._progress("Collecting task influence", len(self.graph.tasks), "tasks") as p:
             for node in reversed(topological_nodes):
-                node.finalize(self.graph
+                node.finalize(self.graph)
                 p.update(1)
 
+        # Create artifacts in forward order so that parent identities are available
+        # when creating resource artifacts that depend on them.
+        for node in topological_nodes:
+            node.finalize_artifacts()
+
         max_time = 0
         min_time = 0
         for node in topological_nodes: