jolt 0.9.76__py3-none-any.whl → 0.9.429__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jolt/__init__.py +88 -7
- jolt/__main__.py +9 -1
- jolt/bin/fstree-darwin-x86_64 +0 -0
- jolt/bin/fstree-linux-x86_64 +0 -0
- jolt/cache.py +839 -367
- jolt/chroot.py +156 -0
- jolt/cli.py +362 -143
- jolt/common_pb2.py +63 -0
- jolt/common_pb2_grpc.py +4 -0
- jolt/config.py +99 -42
- jolt/error.py +19 -4
- jolt/expires.py +2 -2
- jolt/filesystem.py +8 -6
- jolt/graph.py +705 -117
- jolt/hooks.py +63 -1
- jolt/influence.py +129 -6
- jolt/loader.py +369 -121
- jolt/log.py +225 -63
- jolt/manifest.py +28 -38
- jolt/options.py +35 -10
- jolt/pkgs/abseil.py +42 -0
- jolt/pkgs/asio.py +25 -0
- jolt/pkgs/autoconf.py +41 -0
- jolt/pkgs/automake.py +41 -0
- jolt/pkgs/b2.py +31 -0
- jolt/pkgs/boost.py +111 -0
- jolt/pkgs/boringssl.py +32 -0
- jolt/pkgs/busybox.py +39 -0
- jolt/pkgs/bzip2.py +43 -0
- jolt/pkgs/cares.py +29 -0
- jolt/pkgs/catch2.py +36 -0
- jolt/pkgs/cbindgen.py +17 -0
- jolt/pkgs/cista.py +19 -0
- jolt/pkgs/clang.py +44 -0
- jolt/pkgs/cli11.py +23 -0
- jolt/pkgs/cmake.py +48 -0
- jolt/pkgs/cpython.py +196 -0
- jolt/pkgs/crun.py +29 -0
- jolt/pkgs/curl.py +38 -0
- jolt/pkgs/dbus.py +18 -0
- jolt/pkgs/double_conversion.py +24 -0
- jolt/pkgs/fastfloat.py +21 -0
- jolt/pkgs/ffmpeg.py +28 -0
- jolt/pkgs/flatbuffers.py +29 -0
- jolt/pkgs/fmt.py +27 -0
- jolt/pkgs/fstree.py +20 -0
- jolt/pkgs/gflags.py +18 -0
- jolt/pkgs/glib.py +18 -0
- jolt/pkgs/glog.py +25 -0
- jolt/pkgs/glslang.py +21 -0
- jolt/pkgs/golang.py +16 -11
- jolt/pkgs/googlebenchmark.py +18 -0
- jolt/pkgs/googletest.py +46 -0
- jolt/pkgs/gperf.py +15 -0
- jolt/pkgs/grpc.py +73 -0
- jolt/pkgs/hdf5.py +19 -0
- jolt/pkgs/help2man.py +14 -0
- jolt/pkgs/inja.py +28 -0
- jolt/pkgs/jsoncpp.py +31 -0
- jolt/pkgs/libarchive.py +43 -0
- jolt/pkgs/libcap.py +44 -0
- jolt/pkgs/libdrm.py +44 -0
- jolt/pkgs/libedit.py +42 -0
- jolt/pkgs/libevent.py +31 -0
- jolt/pkgs/libexpat.py +27 -0
- jolt/pkgs/libfastjson.py +21 -0
- jolt/pkgs/libffi.py +16 -0
- jolt/pkgs/libglvnd.py +30 -0
- jolt/pkgs/libogg.py +28 -0
- jolt/pkgs/libpciaccess.py +18 -0
- jolt/pkgs/libseccomp.py +21 -0
- jolt/pkgs/libtirpc.py +24 -0
- jolt/pkgs/libtool.py +42 -0
- jolt/pkgs/libunwind.py +35 -0
- jolt/pkgs/libva.py +18 -0
- jolt/pkgs/libvorbis.py +33 -0
- jolt/pkgs/libxml2.py +35 -0
- jolt/pkgs/libxslt.py +17 -0
- jolt/pkgs/libyajl.py +16 -0
- jolt/pkgs/llvm.py +81 -0
- jolt/pkgs/lua.py +54 -0
- jolt/pkgs/lz4.py +26 -0
- jolt/pkgs/m4.py +14 -0
- jolt/pkgs/make.py +17 -0
- jolt/pkgs/mesa.py +81 -0
- jolt/pkgs/meson.py +17 -0
- jolt/pkgs/mstch.py +28 -0
- jolt/pkgs/mysql.py +60 -0
- jolt/pkgs/nasm.py +49 -0
- jolt/pkgs/ncurses.py +30 -0
- jolt/pkgs/ng_log.py +25 -0
- jolt/pkgs/ninja.py +45 -0
- jolt/pkgs/nlohmann_json.py +25 -0
- jolt/pkgs/nodejs.py +19 -11
- jolt/pkgs/opencv.py +24 -0
- jolt/pkgs/openjdk.py +26 -0
- jolt/pkgs/openssl.py +103 -0
- jolt/pkgs/paho.py +76 -0
- jolt/pkgs/patchelf.py +16 -0
- jolt/pkgs/perl.py +42 -0
- jolt/pkgs/pkgconfig.py +64 -0
- jolt/pkgs/poco.py +39 -0
- jolt/pkgs/protobuf.py +77 -0
- jolt/pkgs/pugixml.py +27 -0
- jolt/pkgs/python.py +19 -0
- jolt/pkgs/qt.py +35 -0
- jolt/pkgs/rapidjson.py +26 -0
- jolt/pkgs/rapidyaml.py +28 -0
- jolt/pkgs/re2.py +30 -0
- jolt/pkgs/re2c.py +17 -0
- jolt/pkgs/readline.py +15 -0
- jolt/pkgs/rust.py +41 -0
- jolt/pkgs/sdl.py +28 -0
- jolt/pkgs/simdjson.py +27 -0
- jolt/pkgs/soci.py +46 -0
- jolt/pkgs/spdlog.py +29 -0
- jolt/pkgs/spirv_llvm.py +21 -0
- jolt/pkgs/spirv_tools.py +24 -0
- jolt/pkgs/sqlite.py +83 -0
- jolt/pkgs/ssl.py +12 -0
- jolt/pkgs/texinfo.py +15 -0
- jolt/pkgs/tomlplusplus.py +22 -0
- jolt/pkgs/wayland.py +26 -0
- jolt/pkgs/x11.py +58 -0
- jolt/pkgs/xerces_c.py +20 -0
- jolt/pkgs/xorg.py +360 -0
- jolt/pkgs/xz.py +29 -0
- jolt/pkgs/yamlcpp.py +30 -0
- jolt/pkgs/zeromq.py +47 -0
- jolt/pkgs/zlib.py +69 -0
- jolt/pkgs/zstd.py +33 -0
- jolt/plugins/alias.py +3 -0
- jolt/plugins/allure.py +5 -2
- jolt/plugins/autotools.py +66 -0
- jolt/plugins/cache.py +133 -0
- jolt/plugins/cmake.py +74 -6
- jolt/plugins/conan.py +238 -0
- jolt/plugins/cxx.py +698 -0
- jolt/plugins/cxxinfo.py +7 -0
- jolt/plugins/dashboard.py +1 -1
- jolt/plugins/docker.py +91 -23
- jolt/plugins/email.py +5 -2
- jolt/plugins/email.xslt +144 -101
- jolt/plugins/environ.py +11 -0
- jolt/plugins/fetch.py +141 -0
- jolt/plugins/gdb.py +44 -21
- jolt/plugins/gerrit.py +1 -14
- jolt/plugins/git.py +316 -101
- jolt/plugins/googletest.py +522 -1
- jolt/plugins/http.py +36 -38
- jolt/plugins/libtool.py +63 -0
- jolt/plugins/linux.py +990 -0
- jolt/plugins/logstash.py +4 -4
- jolt/plugins/meson.py +61 -0
- jolt/plugins/ninja-compdb.py +107 -31
- jolt/plugins/ninja.py +929 -134
- jolt/plugins/paths.py +11 -1
- jolt/plugins/pkgconfig.py +219 -0
- jolt/plugins/podman.py +148 -91
- jolt/plugins/python.py +137 -0
- jolt/plugins/remote_execution/__init__.py +0 -0
- jolt/plugins/remote_execution/administration_pb2.py +46 -0
- jolt/plugins/remote_execution/administration_pb2_grpc.py +170 -0
- jolt/plugins/remote_execution/log_pb2.py +32 -0
- jolt/plugins/remote_execution/log_pb2_grpc.py +68 -0
- jolt/plugins/remote_execution/scheduler_pb2.py +41 -0
- jolt/plugins/remote_execution/scheduler_pb2_grpc.py +141 -0
- jolt/plugins/remote_execution/worker_pb2.py +38 -0
- jolt/plugins/remote_execution/worker_pb2_grpc.py +112 -0
- jolt/plugins/report.py +12 -2
- jolt/plugins/rust.py +25 -0
- jolt/plugins/scheduler.py +710 -0
- jolt/plugins/selfdeploy/setup.py +9 -4
- jolt/plugins/selfdeploy.py +138 -88
- jolt/plugins/strings.py +35 -22
- jolt/plugins/symlinks.py +26 -11
- jolt/plugins/telemetry.py +5 -2
- jolt/plugins/timeline.py +13 -3
- jolt/plugins/volume.py +46 -48
- jolt/scheduler.py +591 -191
- jolt/tasks.py +1783 -245
- jolt/templates/export.sh.template +12 -6
- jolt/templates/timeline.html.template +44 -47
- jolt/timer.py +22 -0
- jolt/tools.py +749 -302
- jolt/utils.py +245 -18
- jolt/version.py +1 -1
- jolt/version_utils.py +2 -2
- jolt/xmldom.py +12 -2
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/METADATA +98 -38
- jolt-0.9.429.dist-info/RECORD +207 -0
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/WHEEL +1 -1
- jolt/plugins/amqp.py +0 -834
- jolt/plugins/debian.py +0 -338
- jolt/plugins/ftp.py +0 -181
- jolt/plugins/ninja-cache.py +0 -64
- jolt/plugins/ninjacli.py +0 -271
- jolt/plugins/repo.py +0 -253
- jolt-0.9.76.dist-info/RECORD +0 -79
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/entry_points.txt +0 -0
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/top_level.txt +0 -0
jolt/plugins/podman.py
CHANGED
@@ -1,4 +1,4 @@
-from jolt import
+from jolt import Parameter, Resource, Task
 from jolt.error import raise_task_error_if
 from jolt.tasks import TaskRegistry
 from jolt import attributes
@@ -12,10 +12,9 @@ from jolt.cache import ArtifactListAttribute
 from jolt.cache import ArtifactAttributeSet
 from jolt.cache import ArtifactAttributeSetProvider

+import contextlib
 import json
 from os import path
-
-from platform import system
 import tarfile


@@ -25,24 +24,20 @@ class PodmanListVariable(ArtifactListAttribute):

 class PodmanImportListVariable(PodmanListVariable):
     def apply(self, task, artifact):
-        if isinstance(task, Resource):
-            return
         for tar in self.items():
             try:
                 tag = artifact.podman.tags[0]
             except IndexError:
-                tag = "{}:{}"
+                tag = artifact.tools.expand("{canonical_name}:{identity}")
             task.tools.run(
                 "podman import {} {}",
-                fs.path.join(artifact.path, tar, tag
+                fs.path.join(artifact.path, tar), tag, output_on_error=True)
         for extra_tag in artifact.podman.tags[1:]:
             task.tools.run("podman tag {} {}", tag, extra_tag, output_on_error=True)


 class PodmanLoadListVariable(PodmanListVariable):
     def apply(self, task, artifact):
-        if isinstance(task, Resource):
-            return
         for image in self.items():
             task.tools.run(
                 "podman load -i {}",
@@ -51,16 +46,12 @@ class PodmanLoadListVariable(PodmanListVariable):

 class PodmanPullListVariable(PodmanListVariable):
     def apply(self, task, artifact):
-        if isinstance(task, Resource):
-            return
         for image in self.items():
             task.tools.run("podman pull {}", image, output_on_error=True)


 class PodmanRmiListVariable(PodmanListVariable):
     def unapply(self, task, artifact):
-        if isinstance(task, Resource):
-            return
         for image in self.items():
             task.tools.run("podman rmi -f {}", image, output_on_error=True)

@@ -108,35 +99,6 @@ class PodmanAttributeProvider(ArtifactAttributeSetProvider):
         artifact.podman.unapply(task, artifact)


-class PodmanClient(Download):
-    """ Task: Downloads and publishes the Podman command line client.
-
-    The task will be automatically made available after importing
-    ``jolt.plugins.podman``.
-    """
-
-    name = "podman/cli"
-    """ Name of the task """
-
-    arch = Parameter("x86_64", help="Host architecture")
-    """ Host architecture [x86_64] """
-
-    collect = ["podman/podman"]
-
-    host = Parameter(system().lower(), help="Host operating system")
-    """ Host operating system [autodetected] """
-
-    url = "https://download.podman.com/{host}/static/stable/{arch}/podman-{version}.tgz"
-    """ URL of binaries """
-
-    version = Parameter("20.10.13", help="Podman version")
-    """ Podman version [20.10.13] """
-
-    def publish(self, artifact, tools):
-        super().publish(artifact, tools)
-        artifact.environ.PATH.append("podman")
-
-
 @attributes.requires("_image")
 class Container(Resource):
     """
@@ -152,6 +114,9 @@ class Container(Resource):
     cap_drops = []
     """ A list of capabilities to remove from the container """

+    chroot = False
+    """ Use as chroot - resource consumers will execute all commands in container """
+
     entrypoint = None
     """ Container entrypoint """

@@ -190,6 +155,18 @@ class Container(Resource):
     Alternatively, assign ``True`` to publish all exposed ports to random ports.
     """

+    release_on_error = True
+    """ Stop and remove container on error to avoid resource leaks. """
+
+    stop_timeout = 10
+    """ Timeout in seconds for stopping the container.
+
+    When stopping the container, the task will wait for the container to stop
+    for the specified number of seconds before forcefully killing it.
+
+    Default: 10 seconds.
+    """
+
     volumes = []
     """
     A list of volumes to mount.
@@ -199,8 +176,8 @@ class Container(Resource):
     """

     volumes_default = [
-        "{joltdir}
-        "{joltcachedir}
+        "{joltdir}",
+        "{joltcachedir}",
     ]
     """
     A list of default volumes to mount.
@@ -240,7 +217,7 @@ class Container(Resource):
     def _image(self):
         registry = TaskRegistry.get()
         tool = tools.Tools(self)
-        if registry.
+        if registry.has_task(tool.expand(self.image)):
             return [self.image]
         return []

@@ -277,10 +254,12 @@ class Container(Resource):

     @property
     def _volumes(self):
-        return " ".join([utils.option("-v ", self.tools.
+        return " ".join([utils.option("-v ", self.tools.expand_path(vol))
                          for vol in self.volumes_default + self.volumes])

     def acquire(self, artifact, deps, tools, owner):
+        self._context_stack = None
+        self.container = None
         self.joltcachedir = config.get_cachedir()
         try:
             image = deps[self.image]
@@ -289,21 +268,34 @@
             image = tools.expand(self.image)

         self._info(f"Creating container from image '{image}'")
-
-
-
+        with utils.delayed_interrupt():
+            self.container = tools.run(
+                "podman run -i -d {_cap_adds} {_cap_drops} {_entrypoint} {_labels} {_ports} {_privileged} {_user} {_environment} {_volumes} {image} {_arguments}",
+                image=image, output_on_error=True)

         self._info("Created container '{container}'")
         info = tools.run("podman inspect {container}", output_on_error=True)
         artifact.container = self.container
         artifact.info = json.loads(info)[0]

+        if self.chroot:
+            self._context_stack = contextlib.ExitStack()
+            self._context_stack.enter_context(
+                owner.tools.runprefix(f"podman exec -i {artifact.container}"))
+
     def release(self, artifact, deps, tools, owner):
-        self.
-
+        if self.chroot and self._context_stack:
+            self._context_stack.close()
+
+        if not self.container:
+            return

-
-
+        try:
+            self._info("Stopping container '{container}'")
+            tools.run("podman stop -t {stop_timeout} {container}", output_on_error=True)
+        finally:
+            self._info("Deleting container '{container}'")
+            tools.run("podman rm -f {container}", output_on_error=True)


 class PodmanLogin(Resource):
@@ -322,8 +314,6 @@ class PodmanLogin(Resource):
     name = "podman/login"
     """ Name of the resource """

-    requires = ["podman/cli"]
-
     user = Parameter("", help="Podman Registry username")
     """
     Podman Registry username.
@@ -363,7 +353,6 @@ class PodmanLogin(Resource):
         tools.run("podman logout {server}")


-TaskRegistry.get().add_task_class(PodmanClient)
 TaskRegistry.get().add_task_class(PodmanLogin)


@@ -423,7 +412,6 @@ class ContainerImage(Task):

     Optionally add requirements to:

-    - ``podman/cli`` to provision the Podman client, if none is available on the host.
     - ``podman/login`` to automatically login to the Podman registry.

     This class must be subclassed.
@@ -446,12 +434,14 @@ class ContainerImage(Task):
         class Busybox(ContainerImage):
             \"\"\" Publishes Busybox image as gzip-compressed tarball \"\"\"
             compression = "gz"
-            requires = ["podman/cli"]
             tags = ["busybox:{identity}"]

     """
     abstract = True

+    annotations = []
+    """ A list of image annotations """
+
     autoload = True
     """
     Automatically load image file into local registry when the artifact is
@@ -481,10 +471,26 @@ class ContainerImage(Task):
     dockerfile = "Dockerfile"
     """ Path to the Dockerfile to build, or the full source code of such a file. """

-    output = "oci-archive"
-    """
+    output = ["oci-archive"]
+    """
+    List of output formats.
+
+    If set to None, no output is produced and published.

-
+    Supported formats:
+    - archive
+    - cpio
+    - custom
+    - directory
+    - docker-archive
+    - ext4
+    - oci-archive
+    - oci-directory
+    - squashfs
+
+    """
+
+    imagefile = "{canonical_name}"
     """
     Name of the image tarball published by the task.

@@ -496,9 +502,6 @@ class ContainerImage(Task):
     labels = []
     """ A list of image metadata labels """

-    platform = None
-    """ Target platform, e.g. linux/arm/v7. """
-
     pull = True
     """
     Always pull images when building.
@@ -514,15 +517,25 @@ class ContainerImage(Task):
     The ``podman/login`` Jolt resource can be used for that purpose.
     """

+    size = None
+    """ Size of the image, e.g. "64M" (for certain output formats). """
+
     squash = False
     """ Squash image layers """

     tags = ["{canonical_name}:{identity}"]
     """ Optional list of image tags. Defaults to task's canonical name. """

+    target = None
+    """ Target platform, e.g. linux/arm/v7. """
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)

+    @property
+    def _annotations(self):
+        return " ".join([utils.option("--annotation ", self.tools.expand(an)) for an in self.annotations])
+
     @property
     def _buildargs(self):
         return " ".join([utils.option("--build-arg ", self.tools.expand(ba)) for ba in self.buildargs])
@@ -531,9 +544,13 @@ class ContainerImage(Task):
     def _labels(self):
         return " ".join([utils.option("-l ", self.tools.expand(label)) for label in self.labels])

+    @property
+    def _output(self):
+        return utils.as_list(self.output) if self.output else []
+
     @property
     def _platform(self):
-        platform = self.tools.expand(self.
+        platform = self.tools.expand(self.target) if self.target else None
         return utils.option("--platform ", platform)

     @property
@@ -564,7 +581,7 @@ class ContainerImage(Task):
                  tools.expand_relpath(context))

         with tools.cwd(context):
-            tools.run("podman build {_platform} . -f {} {_buildargs} {_labels} {_tags} {pull}{squash}",
+            tools.run("podman build {_platform} . -f {} {_annotations} {_buildargs} {_labels} {_tags} {pull}{squash}",
                       utils.quote(dockerfile), pull=pull, squash=squash)

         try:
@@ -573,48 +590,88 @@ class ContainerImage(Task):
             for tag in self.tags:
                 tools.run("podman push {}", tag)

-
-            self.info("Saving image")
-
-
+            for output in self._output:
+                self.info("Saving image as {}", output)
+                outdir = tools.builddir(output)
+                with tools.cwd(outdir):
+                    if output in ["oci-archive", "docker-archive"]:
                         tools.run("podman image save --format={output} {} -o {}", self.tags[0], "image.tar")
-                    if
+                    if output == "oci-directory":
                         tools.run("podman image save --format=oci-dir {} -o {}", self.tags[0], "image.dir")
-                    if
+                    if output in ["archive", "cpio", "custom", "directory", "ext4", "squashfs"]:
                         ctr = tools.run("podman create {}", self.tags[0])
                         try:
-
-
-
-
-
-
+                            with tools.runprefix("podman unshare "):
+                                mount_path = tools.run("podman mount {}", ctr, output_on_error=True)
+                                if output == "custom":
+                                    self.run_custom(deps, tools, mount_path)
+                                elif output == "archive":
+                                    tools.run("tar -C {} -cf image.tar .", mount_path, output_on_error=True)
+                                elif output == "cpio":
+                                    with tools.cwd(mount_path):
+                                        tools.run("find | podman unshare cpio -o -F {}/image.cpio -H newc", outdir, output_on_error=True)
+                                elif output == "ext4":
+                                    assert self.size, "Size must be set for ext4 output"
+                                    tools.run("mke2fs -t ext4 -F -L rootfs -d {} image.ext4 {size}", mount_path, output_on_error=True)
+                                elif output == "squashfs":
+                                    tools.run("mksquashfs {} image.squashfs", mount_path, output_on_error=True)
+                                else:
+                                    tools.mkdir("image.dir")
+                                    tools.run("tar c -C {} . | tar --no-same-permissions --no-same-owner --no-overwrite-dir -x -C ./image.dir/", mount_path, output_on_error=True)
                         finally:
                             utils.call_and_catch(tools.run, "podman rm {}", ctr)
         finally:
             if self.cleanup:
                 self.info("Removing image from Podman")
                 for tag in self.tags:
-                    utils.call_and_catch(tools.run("podman
+                    utils.call_and_catch(tools.run("podman rmi -f {}", tag))
+
+    def run_custom(self, deps, tools, mount_path):
+        """
+        Save image as custom output format.
+
+        The method is called when the output format is set to "custom".
+        The mount_path is the path to the mounted container root filesystem.
+
+        The default implementation does nothing.
+        """
+        pass

     def publish(self, artifact, tools):
+        """ Publish the image as different output formats """
+
         artifact.strings.tag = tools.expand(self.tags[0])

         for tag in self.tags:
             artifact.podman.tags.append(tag)

-
-
-
-
+        for output in self._output:
+            outdir = tools.builddir(output)
+            with tools.cwd(outdir):
+                if output in ["oci-archive", "docker-archive"] and self._imagefile:
+                    artifact.collect("image.tar", output + "/{_imagefile}.tar")
                     if self._autoload:
-                        artifact.podman.load.append("{_imagefile}")
-                        artifact.podman.rmi.append(artifact.strings.tag
-                if
-                    artifact.collect("image.tar", "{_imagefile}")
+                        artifact.podman.load.append(output + "/{_imagefile}.tar")
+                        artifact.podman.rmi.append(artifact.strings.tag)
+                if output in ["archive"] and self._imagefile:
+                    artifact.collect("image.tar", output + "/{_imagefile}.tar")
                     if self._autoload:
-                        artifact.podman.imprt.append("{_imagefile}")
-                        artifact.podman.rmi.append(artifact.strings.tag
-                if
+                        artifact.podman.imprt.append(output + "/{_imagefile}.tar")
+                        artifact.podman.rmi.append(artifact.strings.tag)
+                if output in ["directory", "oci-directory"]:
                     with tools.cwd("image.dir"):
-                        artifact.collect("*", symlinks=True)
+                        artifact.collect("*", f"{output}/", symlinks=True)
+                if output in ["cpio"]:
+                    artifact.collect("image.cpio", output + "/{_imagefile}.cpio")
+                if output in ["custom"]:
+                    self.publish_custom(artifact, tools)
+                if output in ["directory"]:
+                    artifact.paths.rootfs = output
+                if output in ["ext4"]:
+                    artifact.collect("image.ext4", output + "/{_imagefile}.ext4")
+                if output in ["squashfs"]:
+                    artifact.collect("image.squashfs", output + "/{_imagefile}.squashfs")
+
+    def publish_custom(self, artifact, tools):
+        """ Publish custom output as produced by run_custom """
+        pass
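The ContainerImage rewrite above replaces the single output string with a list of output formats and adds annotations, size, and target attributes, while the Container resource gains chroot and stop_timeout. A minimal sketch of how a build recipe might use them; the task names, tag values, and the Dockerfile path are hypothetical and only the attribute names come from the diff:

# Hypothetical build recipe; attribute names are taken from the diff above.
from jolt.plugins.podman import Container, ContainerImage


class AppImage(ContainerImage):
    """ Builds an assumed docker/Dockerfile.app and publishes two output formats """
    dockerfile = "docker/Dockerfile.app"               # assumed path
    output = ["oci-archive", "ext4"]                   # list replaces the old string attribute
    size = "64M"                                       # required by the ext4 output (per the assert)
    annotations = ["org.example.revision={identity}"]  # passed as --annotation options
    target = "linux/arm/v7"                            # replaces the removed 'platform' attribute
    tags = ["appimage:{identity}"]


class AppContainer(Container):
    """ Runs the image; with chroot=True, consuming tasks execute commands inside it """
    image = "appimage"                                 # assumed task/tag name
    chroot = True
    stop_timeout = 30                                  # seconds to wait before the container is killed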
jolt/plugins/python.py
CHANGED
@@ -1,9 +1,12 @@
 import sys

+from jolt import Task
+from jolt import attributes
 from jolt import filesystem as fs
 from jolt.cache import ArtifactStringAttribute
 from jolt.cache import ArtifactAttributeSet
 from jolt.cache import ArtifactAttributeSetProvider
+from jolt.error import raise_task_error_if


 class PythonVariable(ArtifactStringAttribute):
@@ -98,3 +101,137 @@ class PythonProvider(ArtifactAttributeSetProvider):

     def unapply(self, task, artifact):
         artifact.python.unapply(task, artifact)
+
+
+@attributes.system
+@attributes.requires("requires_python")
+class PythonEnv(Task):
+    """
+    Base class for Python virtual environment tasks.
+
+    Builds a Python virtual environment and installs specified packages.
+
+    The venv module from the Python standard library must be available in the
+    Python installation used to run the task.
+    """
+
+    abstract = True
+    """ This is an abstract base class that should be inherited by concrete tasks. """
+
+    executable = "python3"
+    """ Python executable to use for creating the virtual environment. """
+
+    requirements = []
+    """
+    List of Python packages to install in the virtual environment.
+
+    Each entry should be a string suitable for pip, e.g., "package==version".
+    """
+
+    def _verify_influence(self, deps, artifact, tools, sources=None):
+        # No influence to verify
+        return
+
+    def relocate_scripts(self, artifact, tools, frompath, topath):
+        bindir = "Scripts" if self.system == "windows" else "bin"
+
+        with tools.cwd(artifact.path, bindir):
+            for script in tools.glob("*"):
+                if script.startswith("python"):
+                    continue
+                tools.replace_in_file(script, frompath, topath)
+
+        with tools.cwd(artifact.path):
+            if not tools.exists("local/bin"):
+                return
+            with tools.cwd("local", "bin"):
+                for script in tools.glob("*"):
+                    tools.replace_in_file(script, frompath, topath)
+
+    def publish(self, artifact, tools):
+        # Create a parallel installation by copying a Python installation
+
+        # First locate the Python executable to copy
+        py_exe = tools.which(self.executable)
+        raise_task_error_if(
+            py_exe is None, self,
+            f"Python executable '{self.executable}' not found in PATH.",
+        )
+
+        # Follow symlinks to get the real executable
+        py_exe = fs.path.realpath(py_exe)
+
+        # Determine the Python home directory
+        py_home = fs.path.dirname(fs.path.dirname(py_exe))
+
+        # Determine the Python version
+        self.version_major = tools.run(
+            [py_exe, "-c", "import sys; print(\"{{}}.{{}}\".format(sys.version_info[0], sys.version_info[1]))"],
+            shell=False,
+            output_on_error=True).strip()
+
+        self.info("Python executable: {0}", py_exe)
+        self.info("Python home: {0}", py_home)
+        self.info("Python version: {0}", self.version_major)
+
+        # Copy the Python installation to the artifact path
+        with tools.cwd(py_home):
+            artifact.collect(py_exe, "bin/python3")
+            artifact.collect("lib/python3")
+            artifact.collect("lib/python{version_major}")
+            artifact.collect("lib/libpython{version_major}.*")
+
+        # Create common symlinks
+        if self.system != "windows":
+            with tools.cwd(artifact.path, "bin"):
+                tools.symlink("python3", "python")
+                tools.symlink("python3", "python{version_major}")
+
+        # Install required packages into the artifact using pip
+        with tools.environ(PYTHONHOME=artifact.path):
+            py_exe = fs.path.join(artifact.path, "bin", "python3")
+            with tools.tmpdir() as tmp, tools.cwd(tmp):
+                tools.write_file(
+                    "requirements.txt",
+                    "\n".join(self.requirements) + "\n"
+                )
+
+                pip_cmd = [
+                    py_exe,
+                    "-m",
+                    "pip",
+                    "--isolated",
+                    "--no-cache-dir",
+                    "install",
+                    "-r",
+                    "requirements.txt",
+                    "--break-system-packages",
+                ]
+                tools.run(pip_cmd, shell=False)
+
+        artifact.environ.PATH.append("bin")
+        artifact.environ.PATH.append("local/bin")
+        artifact.strings.install_prefix = artifact.path
+
+    def unpack(self, artifact, tools):
+        # Relocate the virtual environment by adjusting script paths
+        frompath = artifact.strings.install_prefix
+        topath = artifact.final_path
+        self.relocate_scripts(artifact, tools, frompath, topath)

+        artifact.strings.install_prefix = artifact.final_path
+
+
+def requires(python=True):
+    """ Decorator to add Python requirements to a task. """
+
+    import jolt.pkgs.cpython
+
+    def decorate(cls):
+        if python:
+            cls = attributes.requires("requires_python")(cls)
+            cls.requires_python = ["cpython"]
+
+        return cls
+
+    return decorate
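The new PythonEnv base class above copies a host Python installation into the artifact and pip-installs the listed requirements, and the module-level requires() decorator wires a task up to the bundled cpython package task. A minimal sketch of both follows; class names and version pins are illustrative, and it assumes the cpython artifact exposes python3 on PATH:

# Hypothetical tasks; attribute and decorator names come from the diff above.
from jolt import Task
from jolt.plugins import python
from jolt.plugins.python import PythonEnv


class Tooling(PythonEnv):
    """ Python environment with pinned packages, published as an artifact """
    requires_python = ["cpython"]        # same value the requires() decorator sets
    requirements = [
        "requests==2.32.3",              # illustrative pins in "package==version" form
        "pyyaml==6.0.2",
    ]


@python.requires()
class Lint(Task):
    """ Task that depends on the bundled cpython via the decorator """
    def run(self, deps, tools):
        tools.run("python3 --version")   # assumes the cpython artifact exports python3 on PATH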
jolt/plugins/remote_execution/__init__.py
File without changes
jolt/plugins/remote_execution/administration_pb2.py
ADDED
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: jolt/plugins/remote_execution/administration.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from jolt import common_pb2 as jolt_dot_common__pb2
+from jolt.plugins.remote_execution import scheduler_pb2 as jolt_dot_plugins_dot_remote__execution_dot_scheduler__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2jolt/plugins/remote_execution/administration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x11jolt/common.proto\x1a-jolt/plugins/remote_execution/scheduler.proto\"\"\n\x11ListBuildsRequest\x12\r\n\x05tasks\x18\x01 \x01(\x08\"\xf4\x02\n\x12ListBuildsResponse\x12)\n\x06\x62uilds\x18\x01 \x03(\x0b\x32\x19.ListBuildsResponse.Build\x1aR\n\x04Task\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x1b\n\x06status\x18\x03 \x01(\x0e\x32\x0b.TaskStatus\x12\x13\n\x0bhasObserver\x18\x04 \x01(\x08\x1a\xde\x01\n\x05\x42uild\x12\n\n\x02id\x18\x01 \x01(\t\x12\x1c\n\x06status\x18\x02 \x01(\x0e\x32\x0c.BuildStatus\x12\'\n\x05tasks\x18\x03 \x03(\x0b\x32\x18.ListBuildsResponse.Task\x12/\n\x0bscheduledAt\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x13\n\x0bhasObserver\x18\x05 \x01(\x08\x12\x16\n\x0ehasRunningTask\x18\x06 \x01(\x08\x12\x15\n\rhasQueuedTask\x18\x07 \x01(\x08\x12\r\n\x05ready\x18\x08 \x01(\x08\"\x14\n\x12ListWorkersRequest\"\x80\x02\n\x13ListWorkersResponse\x12,\n\x07workers\x18\x01 \x03(\x0b\x32\x1b.ListWorkersResponse.Worker\x1a=\n\x04Task\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x1b\n\x06status\x18\x03 \x01(\x0e\x32\x0b.TaskStatus\x1a|\n\x06Worker\x12\n\n\x02id\x18\x01 \x01(\t\x12\x1b\n\x08platform\x18\x02 \x01(\x0b\x32\t.Platform\x12 \n\rtask_platform\x18\x03 \x01(\x0b\x32\t.Platform\x12\'\n\x04task\x18\x04 \x01(\x0b\x32\x19.ListWorkersResponse.Task2\xf9\x01\n\x0e\x41\x64ministration\x12\x38\n\x0b\x43\x61ncelBuild\x12\x13.CancelBuildRequest\x1a\x14.CancelBuildResponse\x12\x35\n\nListBuilds\x12\x12.ListBuildsRequest\x1a\x13.ListBuildsResponse\x12\x38\n\x0bListWorkers\x12\x13.ListWorkersRequest\x1a\x14.ListWorkersResponse\x12<\n\nReschedule\x12\x16.google.protobuf.Empty\x1a\x16.google.protobuf.EmptyB\x0eZ\x0cpkg/protocolb\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'jolt.plugins.remote_execution.administration_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+  _globals['DESCRIPTOR']._options = None
+  _globals['DESCRIPTOR']._serialized_options = b'Z\014pkg/protocol'
+  _globals['_LISTBUILDSREQUEST']._serialized_start=182
+  _globals['_LISTBUILDSREQUEST']._serialized_end=216
+  _globals['_LISTBUILDSRESPONSE']._serialized_start=219
+  _globals['_LISTBUILDSRESPONSE']._serialized_end=591
+  _globals['_LISTBUILDSRESPONSE_TASK']._serialized_start=284
+  _globals['_LISTBUILDSRESPONSE_TASK']._serialized_end=366
+  _globals['_LISTBUILDSRESPONSE_BUILD']._serialized_start=369
+  _globals['_LISTBUILDSRESPONSE_BUILD']._serialized_end=591
+  _globals['_LISTWORKERSREQUEST']._serialized_start=593
+  _globals['_LISTWORKERSREQUEST']._serialized_end=613
+  _globals['_LISTWORKERSRESPONSE']._serialized_start=616
+  _globals['_LISTWORKERSRESPONSE']._serialized_end=872
+  _globals['_LISTWORKERSRESPONSE_TASK']._serialized_start=284
+  _globals['_LISTWORKERSRESPONSE_TASK']._serialized_end=345
+  _globals['_LISTWORKERSRESPONSE_WORKER']._serialized_start=748
+  _globals['_LISTWORKERSRESPONSE_WORKER']._serialized_end=872
+  _globals['_ADMINISTRATION']._serialized_start=875
+  _globals['_ADMINISTRATION']._serialized_end=1124
+# @@protoc_insertion_point(module_scope)
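The generated administration_pb2 module above describes an Administration service with CancelBuild, ListBuilds, ListWorkers and Reschedule methods. A minimal sketch of calling it, assuming the companion administration_pb2_grpc module (also added in this release) exposes the conventional protoc-generated AdministrationStub and that a scheduler is listening on the address shown:

# Hypothetical client; message and field names are taken from the descriptor above.
import grpc

from jolt.plugins.remote_execution import administration_pb2
from jolt.plugins.remote_execution import administration_pb2_grpc

with grpc.insecure_channel("localhost:9090") as channel:   # address is an assumption
    stub = administration_pb2_grpc.AdministrationStub(channel)
    response = stub.ListBuilds(administration_pb2.ListBuildsRequest(tasks=True))
    for build in response.builds:
        print(build.id, build.status, build.hasObserver)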