jolt 0.9.172__py3-none-any.whl → 0.9.435__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jolt/__init__.py +80 -7
- jolt/__main__.py +9 -1
- jolt/bin/fstree-darwin-x86_64 +0 -0
- jolt/bin/fstree-linux-x86_64 +0 -0
- jolt/cache.py +596 -252
- jolt/chroot.py +36 -11
- jolt/cli.py +143 -130
- jolt/common_pb2.py +45 -45
- jolt/config.py +76 -40
- jolt/error.py +19 -4
- jolt/filesystem.py +2 -6
- jolt/graph.py +400 -82
- jolt/influence.py +110 -3
- jolt/loader.py +338 -174
- jolt/log.py +127 -31
- jolt/manifest.py +13 -46
- jolt/options.py +35 -11
- jolt/pkgs/abseil.py +42 -0
- jolt/pkgs/asio.py +25 -0
- jolt/pkgs/autoconf.py +41 -0
- jolt/pkgs/automake.py +41 -0
- jolt/pkgs/b2.py +31 -0
- jolt/pkgs/boost.py +111 -0
- jolt/pkgs/boringssl.py +32 -0
- jolt/pkgs/busybox.py +39 -0
- jolt/pkgs/bzip2.py +43 -0
- jolt/pkgs/cares.py +29 -0
- jolt/pkgs/catch2.py +36 -0
- jolt/pkgs/cbindgen.py +17 -0
- jolt/pkgs/cista.py +19 -0
- jolt/pkgs/clang.py +44 -0
- jolt/pkgs/cli11.py +24 -0
- jolt/pkgs/cmake.py +48 -0
- jolt/pkgs/cpython.py +196 -0
- jolt/pkgs/crun.py +29 -0
- jolt/pkgs/curl.py +38 -0
- jolt/pkgs/dbus.py +18 -0
- jolt/pkgs/double_conversion.py +24 -0
- jolt/pkgs/fastfloat.py +21 -0
- jolt/pkgs/ffmpeg.py +28 -0
- jolt/pkgs/flatbuffers.py +29 -0
- jolt/pkgs/fmt.py +27 -0
- jolt/pkgs/fstree.py +20 -0
- jolt/pkgs/gflags.py +18 -0
- jolt/pkgs/glib.py +18 -0
- jolt/pkgs/glog.py +25 -0
- jolt/pkgs/glslang.py +21 -0
- jolt/pkgs/golang.py +16 -11
- jolt/pkgs/googlebenchmark.py +18 -0
- jolt/pkgs/googletest.py +46 -0
- jolt/pkgs/gperf.py +15 -0
- jolt/pkgs/grpc.py +73 -0
- jolt/pkgs/hdf5.py +19 -0
- jolt/pkgs/help2man.py +14 -0
- jolt/pkgs/inja.py +28 -0
- jolt/pkgs/jsoncpp.py +31 -0
- jolt/pkgs/libarchive.py +43 -0
- jolt/pkgs/libcap.py +44 -0
- jolt/pkgs/libdrm.py +44 -0
- jolt/pkgs/libedit.py +42 -0
- jolt/pkgs/libevent.py +31 -0
- jolt/pkgs/libexpat.py +27 -0
- jolt/pkgs/libfastjson.py +21 -0
- jolt/pkgs/libffi.py +16 -0
- jolt/pkgs/libglvnd.py +30 -0
- jolt/pkgs/libogg.py +28 -0
- jolt/pkgs/libpciaccess.py +18 -0
- jolt/pkgs/libseccomp.py +21 -0
- jolt/pkgs/libtirpc.py +24 -0
- jolt/pkgs/libtool.py +42 -0
- jolt/pkgs/libunwind.py +35 -0
- jolt/pkgs/libva.py +18 -0
- jolt/pkgs/libvorbis.py +33 -0
- jolt/pkgs/libxml2.py +35 -0
- jolt/pkgs/libxslt.py +17 -0
- jolt/pkgs/libyajl.py +16 -0
- jolt/pkgs/llvm.py +81 -0
- jolt/pkgs/lua.py +54 -0
- jolt/pkgs/lz4.py +26 -0
- jolt/pkgs/m4.py +14 -0
- jolt/pkgs/make.py +17 -0
- jolt/pkgs/mesa.py +81 -0
- jolt/pkgs/meson.py +17 -0
- jolt/pkgs/mstch.py +28 -0
- jolt/pkgs/mysql.py +60 -0
- jolt/pkgs/nasm.py +49 -0
- jolt/pkgs/ncurses.py +30 -0
- jolt/pkgs/ng_log.py +25 -0
- jolt/pkgs/ninja.py +45 -0
- jolt/pkgs/nlohmann_json.py +25 -0
- jolt/pkgs/nodejs.py +19 -11
- jolt/pkgs/opencv.py +24 -0
- jolt/pkgs/openjdk.py +26 -0
- jolt/pkgs/openssl.py +103 -0
- jolt/pkgs/paho.py +76 -0
- jolt/pkgs/patchelf.py +16 -0
- jolt/pkgs/perl.py +42 -0
- jolt/pkgs/pkgconfig.py +64 -0
- jolt/pkgs/poco.py +39 -0
- jolt/pkgs/protobuf.py +77 -0
- jolt/pkgs/pugixml.py +27 -0
- jolt/pkgs/python.py +19 -0
- jolt/pkgs/qt.py +35 -0
- jolt/pkgs/rapidjson.py +26 -0
- jolt/pkgs/rapidyaml.py +28 -0
- jolt/pkgs/re2.py +30 -0
- jolt/pkgs/re2c.py +17 -0
- jolt/pkgs/readline.py +15 -0
- jolt/pkgs/rust.py +41 -0
- jolt/pkgs/sdl.py +28 -0
- jolt/pkgs/simdjson.py +27 -0
- jolt/pkgs/soci.py +46 -0
- jolt/pkgs/spdlog.py +29 -0
- jolt/pkgs/spirv_llvm.py +21 -0
- jolt/pkgs/spirv_tools.py +24 -0
- jolt/pkgs/sqlite.py +83 -0
- jolt/pkgs/ssl.py +12 -0
- jolt/pkgs/texinfo.py +15 -0
- jolt/pkgs/tomlplusplus.py +22 -0
- jolt/pkgs/wayland.py +26 -0
- jolt/pkgs/x11.py +58 -0
- jolt/pkgs/xerces_c.py +20 -0
- jolt/pkgs/xorg.py +360 -0
- jolt/pkgs/xz.py +29 -0
- jolt/pkgs/yamlcpp.py +30 -0
- jolt/pkgs/zeromq.py +47 -0
- jolt/pkgs/zlib.py +87 -0
- jolt/pkgs/zstd.py +33 -0
- jolt/plugins/alias.py +3 -0
- jolt/plugins/allure.py +2 -2
- jolt/plugins/autotools.py +66 -0
- jolt/plugins/cache.py +1 -1
- jolt/plugins/cmake.py +74 -6
- jolt/plugins/conan.py +238 -0
- jolt/plugins/cxxinfo.py +7 -0
- jolt/plugins/docker.py +76 -19
- jolt/plugins/email.xslt +141 -118
- jolt/plugins/environ.py +11 -0
- jolt/plugins/fetch.py +141 -0
- jolt/plugins/gdb.py +33 -14
- jolt/plugins/gerrit.py +0 -13
- jolt/plugins/git.py +248 -66
- jolt/plugins/googletest.py +1 -1
- jolt/plugins/http.py +1 -1
- jolt/plugins/libtool.py +63 -0
- jolt/plugins/linux.py +990 -0
- jolt/plugins/logstash.py +4 -4
- jolt/plugins/meson.py +61 -0
- jolt/plugins/ninja-compdb.py +96 -28
- jolt/plugins/ninja.py +424 -150
- jolt/plugins/paths.py +11 -1
- jolt/plugins/pkgconfig.py +219 -0
- jolt/plugins/podman.py +131 -87
- jolt/plugins/python.py +137 -0
- jolt/plugins/remote_execution/administration_pb2.py +27 -19
- jolt/plugins/remote_execution/log_pb2.py +12 -12
- jolt/plugins/remote_execution/scheduler_pb2.py +23 -23
- jolt/plugins/remote_execution/worker_pb2.py +19 -19
- jolt/plugins/report.py +7 -2
- jolt/plugins/rust.py +25 -0
- jolt/plugins/scheduler.py +135 -86
- jolt/plugins/selfdeploy/setup.py +6 -6
- jolt/plugins/selfdeploy.py +49 -31
- jolt/plugins/strings.py +35 -22
- jolt/plugins/symlinks.py +11 -4
- jolt/plugins/telemetry.py +1 -2
- jolt/plugins/timeline.py +13 -3
- jolt/scheduler.py +467 -165
- jolt/tasks.py +427 -111
- jolt/templates/timeline.html.template +44 -47
- jolt/timer.py +22 -0
- jolt/tools.py +527 -188
- jolt/utils.py +183 -3
- jolt/version.py +1 -1
- jolt/xmldom.py +12 -2
- {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/METADATA +97 -41
- jolt-0.9.435.dist-info/RECORD +207 -0
- {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/WHEEL +1 -1
- jolt/plugins/amqp.py +0 -855
- jolt/plugins/debian.py +0 -338
- jolt/plugins/repo.py +0 -253
- jolt/plugins/snap.py +0 -122
- jolt-0.9.172.dist-info/RECORD +0 -92
- {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/entry_points.txt +0 -0
- {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/top_level.txt +0 -0
jolt/tools.py
CHANGED
@@ -1,13 +1,16 @@
+import py7zr
 import bz2
 import copy
 import getpass
 import gzip
+import json
 import lzma
 import subprocess
 import os
 import platform
 import sys
 import threading
+import time
 if os.name != "nt":
     import termios
 import glob

@@ -18,6 +21,7 @@ import tarfile
 import zipfile
 import bz2file
 import hashlib
+import zstandard
 from contextlib import contextmanager
 from psutil import NoSuchProcess, Process
 from jinja2 import Environment, FileSystemLoader

@@ -25,6 +29,8 @@ from jinja2.exceptions import TemplateError
 from jinja2.runtime import Context
 from jinja2.utils import missing
 from requests import Session
+from requests.auth import HTTPBasicAuth
+from urllib.parse import urlparse, urlunparse


 from jolt import cache

@@ -32,11 +38,14 @@ from jolt import filesystem as fs
 from jolt import log
 from jolt import utils
 from jolt import config
-from jolt.error import JoltCommandError
+from jolt.error import JoltCommandError, JoltTimeoutError
 from jolt.error import raise_error_if
 from jolt.error import raise_task_error, raise_task_error_if


+SUPPORTED_ARCHIVE_TYPES = [".tar", ".tar.bz2", ".tar.gz", ".tgz", ".tar.xz", ".tar.zst", ".zip"]
+
+
 http_session = Session()


@@ -50,66 +59,54 @@ def stderr_write(line):
     sys.stderr.flush()


-def _run(cmd, cwd, env, preexec_fn, *args, **kwargs):
+class Reader(threading.Thread):
+    def __init__(self, parent, stream, output=None, logbuf=None, output_rstrip=True):
+        super(Reader, self).__init__()
+        self.output = output
+        self.output_rstrip = output_rstrip
+        self.parent = parent
+        self.stream = stream
+        self.logbuf = logbuf if logbuf is not None else []
+        self.start()
+
+    def run(self):
+        line = ""
+        try:
+            with log.map_thread(self, self.parent):
+                for line in iter(self.stream.readline, b''):
+                    if self.output_rstrip:
+                        line = line.rstrip()
+                    line = line.decode(errors='ignore')
+                    if self.output:
+                        self.output(line)
+                    self.logbuf.append((self, line))
+        except Exception as e:
+            if self.output:
+                self.output("{0}", str(e))
+                self.output(line)
+            self.logbuf.append((self, line))
+
+
+def _run(cmd, cwd, env, *args, **kwargs):
     output = kwargs.get("output")
     output_on_error = kwargs.get("output_on_error")
     output_rstrip = kwargs.get("output_rstrip", True)
     output_stdio = kwargs.get("output_stdio", False)
+    output_stderr = kwargs.get("output_stderr", True)
+    output_stdout = kwargs.get("output_stdout", True)
     return_stderr = kwargs.get("return_stderr", False)
     output = output if output is not None else True
     output = False if output_on_error else output
     shell = kwargs.get("shell", True)
-    timeout = kwargs.get("timeout",
+    timeout = kwargs.get("timeout", config.getint("jolt", "command_timeout", 0))
+    timeout = timeout if type(timeout) is int and timeout > 0 else None

     log.debug("Running: '{0}' (CWD: {1})", cmd, cwd)

-    p =
-
-
-
-        stderr=subprocess.PIPE,
-        shell=shell,
-        cwd=cwd,
-        env=env,
-        preexec_fn=preexec_fn,
-    )
-
-    class Reader(threading.Thread):
-        def __init__(self, parent, stream, output=None, logbuf=None):
-            super(Reader, self).__init__()
-            self.output = output
-            self.parent = parent
-            self.stream = stream
-            self.logbuf = logbuf if logbuf is not None else []
-            self.start()
-
-        def run(self):
-            line = ""
-            try:
-                with log.map_thread(self, self.parent):
-                    for line in iter(self.stream.readline, b''):
-                        if output_rstrip:
-                            line = line.rstrip()
-                        line = line.decode(errors='ignore')
-                        if self.output:
-                            self.output(line)
-                        self.logbuf.append((self, line))
-            except Exception as e:
-                if self.output:
-                    self.output("{0}", str(e))
-                    self.output(line)
-                self.logbuf.append((self, line))
-
-    stdout_func = log.stdout if not output_stdio else stdout_write
-    stderr_func = log.stderr if not output_stdio else stderr_write
-
-    logbuf = []
-    stdout = Reader(
-        threading.current_thread(), p.stdout,
-        output=stdout_func if output else None, logbuf=logbuf)
-    stderr = Reader(
-        threading.current_thread(), p.stderr,
-        output=stderr_func if output else None, logbuf=logbuf)
+    p = None
+    stdout = None
+    stderr = None
+    timedout = False

     def terminate(pid):
         try:

@@ -129,23 +126,73 @@ def _run(cmd, cwd, env, preexec_fn, *args, **kwargs):
     except NoSuchProcess:
         pass

-    timedout = False
     try:
+        with utils.delayed_interrupt():
+            p = subprocess.Popen(
+                cmd,
+                stdin=subprocess.PIPE,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                shell=shell,
+                cwd=cwd,
+                env=env,
+            )
+
+        if output_stdout:
+            stdout_func = log.stdout if not output_stdio else stdout_write
+        else:
+            stdout_func = None
+
+        if output_stderr:
+            stderr_func = log.stderr if not output_stdio else stderr_write
+        else:
+            stderr_func = None
+
+        logbuf = []
+        stdout = Reader(
+            threading.current_thread(),
+            p.stdout,
+            output=stdout_func if output else None,
+            logbuf=logbuf,
+            output_rstrip=output_rstrip)
+        stderr = Reader(
+            threading.current_thread(),
+            p.stderr,
+            output=stderr_func if output else None,
+            logbuf=logbuf,
+            output_rstrip=output_rstrip)
+
         p.wait(timeout=timeout)
-
+
+    except KeyboardInterrupt:
+        if not p:
+            raise
+        try:
+            terminate(p.pid)
+            p.wait(10)
+        except subprocess.TimeoutExpired:
+            kill(p.pid)
+            utils.call_and_catch(p.wait, 10)
+        raise
+
+    except (subprocess.TimeoutExpired, JoltTimeoutError):
         timedout = True
         try:
            terminate(p.pid)
            p.wait(10)
        except subprocess.TimeoutExpired:
            kill(p.pid)
-            p.wait
+            utils.call_and_catch(p.wait, 10)
+
     finally:
-        stdout
-
-
-
-        p
+        if stdout:
+            stdout.join()
+        if stderr:
+            stderr.join()
+        if p:
+            p.stdin.close()
+            p.stdout.close()
+            p.stderr.close()

     if p.returncode != 0 and output_on_error:
         for reader, line in logbuf:

@@ -164,11 +211,13 @@ def _run(cmd, cwd, env, preexec_fn, *args, **kwargs):

     if p.returncode != 0:
         stderrbuf = [line for reader, line in logbuf if reader is stderr]
-
-
-            "timeout" if
-
-
+        if timedout:
+            raise JoltTimeoutError(
+                "Command timeout: " + (" ".join(cmd) if type(cmd) is list else cmd))
+        else:
+            raise JoltCommandError(
+                "Command failed: " + (" ".join(cmd) if type(cmd) is list else cmd),
+                stdoutbuf, stderrbuf, p.returncode)
     if return_stderr:
         return "\n".join(stdoutbuf) if output_rstrip else "".join(stdoutbuf), \
             "\n".join(stderrbuf) if output_rstrip else "".join(stderrbuf)
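Note: the rewritten _run() hoists the Reader thread to module scope, adds per-stream output switches (output_stdout, output_stderr), reads a default timeout from the command_timeout key in the [jolt] configuration section, and raises JoltTimeoutError rather than a plain command error when it expires. A minimal sketch of how a task might use the new keywords through tools.run(); the task class and command are illustrative, not taken from the package:

    from jolt import Task

    class Build(Task):
        def run(self, deps, tools):
            # Silence stdout but keep stderr visible; give up after 10 minutes.
            tools.run("make -j{}", tools.cpu_count(),
                      output_stdout=False, timeout=600)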
@@ -205,36 +254,6 @@ class _String(object):
         return self._str.startswith(substr)


-class _tmpdir(object):
-    def __init__(self, name, cwd=None):
-        self._name = name
-        self._path = None
-        self._cwd = cwd or os.getcwd()
-
-    def __enter__(self):
-        try:
-            dirname = self._cwd
-            fs.makedirs(fs.path.join(dirname, fs.path.dirname(self._name)))
-            self._path = fs.mkdtemp(prefix=self._name + "-", dir=dirname)
-        except KeyboardInterrupt as e:
-            raise e
-        except Exception as e:
-            raise e
-        raise_error_if(not self._path, "failed to create temporary directory")
-        return self
-
-    def __exit__(self, type, value, tb):
-        if self._path:
-            fs.rmtree(self._path, ignore_errors=True)
-
-    @property
-    def path(self):
-        return self.get_path()
-
-    def get_path(self):
-        return self._path
-
-
 class _CMake(object):
     def __init__(self, deps, tools, incremental=False):
         self.deps = deps

@@ -242,6 +261,10 @@ class _CMake(object):
         self.builddir = self.tools.builddir(incremental=incremental)
         self.installdir = self.tools.builddir("install", incremental=False)

+    def clean(self):
+        self.tools.rmtree(self.builddir, ignore_errors=True)
+        self.tools.rmtree(self.installdir, ignore_errors=True)
+
     def configure(self, sourcedir, *args, generator=None, **kwargs):
         sourcedir = self.tools.expand_path(sourcedir)

@@ -252,74 +275,85 @@ class _CMake(object):

         with self.tools.cwd(self.builddir):
             self.tools.run(
-                "cmake {0}
+                "cmake {0} {1} -DCMAKE_INSTALL_PREFIX=/jolt-prefix {1} {2} {3}",
                 sourcedir,
-                self.builddir,
-                self.installdir,
+                utils.option("-B", self.builddir),
                 utils.option("-G", generator),
                 extra_args,
                 output=True)

-    def build(self,
+    def build(self, *args, config="Release", **kwargs):
         threading_args = ' -j {}'.format(kwargs.get("threads", self.tools.thread_count()))
         with self.tools.cwd(self.builddir):
-
-            self.tools.run("cmake --build . {0}{1}", release, threading_args, output=True)
+            self.tools.run("cmake --build . --config {0} {1}", config, threading_args, output=True)

-    def install(self,
-        with self.tools.cwd(self.builddir):
-
-            self.tools.run("cmake --build . --target install {0}", release, output=True)
+    def install(self, target="install", config="Release", **kwargs):
+        with self.tools.cwd(self.builddir), self.tools.environ(DESTDIR=self.installdir):
+            self.tools.run("cmake --build . --config {0} --target {1}", config, target, output=True)

-    def publish(self, artifact, files='*', *args, **kwargs):
-        with self.tools.cwd(self.installdir):
-            artifact.collect(files, *args, **kwargs)
+    def publish(self, artifact, files='*', symlinks=True, *args, **kwargs):
+        with self.tools.cwd(self.installdir, "jolt-prefix"):
+            artifact.collect(files, *args, symlinks=symlinks, **kwargs)
+        artifact.strings.install_prefix = "/jolt-prefix"


 class _Meson(object):
-    def __init__(self, deps, tools):
+    def __init__(self, deps, tools, incremental=False):
         self.deps = deps
         self.tools = tools
-        self.builddir = self.tools.builddir()
-        self.installdir = self.tools.builddir("install")
+        self.builddir = self.tools.builddir(incremental=incremental)
+        self.installdir = self.tools.builddir("install", incremental=False)
+        self.prefix = "/jolt-prefix" if os.name != "nt" else "C:\\jolt-prefix"
+
+    def clean(self):
+        self.tools.rmtree(self.builddir, ignore_errors=True)
+        self.tools.rmtree(self.installdir, ignore_errors=True)

     def configure(self, sourcedir, *args, **kwargs):
         sourcedir = self.tools.expand_path(sourcedir)
-
+        options = " ".join([f"-D{arg}" for arg in args]) + " "
+        options += " ".join(["-D{0}={1}".format(key, self.tools.expand(val)) for key, val in kwargs.items()])
+        self.tools.run("meson setup --prefix={0} {1} {2} {3}", self.prefix, sourcedir, self.builddir, options,
                        output=True)

     def build(self, *args, **kwargs):
         self.tools.run("ninja -C {0} ", self.builddir, output=True)

     def install(self, *args, **kwargs):
-        self.tools.
-
-                       output=True)
+        with self.tools.environ(DESTDIR=self.installdir):
+            self.tools.run("ninja -C {0} install", self.builddir, output=True)

-    def publish(self, artifact, files='*', *args, **kwargs):
-        with self.tools.cwd(self.installdir):
-            artifact.collect(files, *args, **kwargs)
+    def publish(self, artifact, files='*', symlinks=True, *args, **kwargs):
+        with self.tools.cwd(self.installdir, "jolt-prefix"):
+            artifact.collect(files, *args, symlinks=symlinks, **kwargs)
+        artifact.strings.install_prefix = self.prefix


 class _AutoTools(object):
-    def __init__(self, deps, tools):
+    def __init__(self, deps, tools, incremental=False):
         self.deps = deps
         self.tools = tools
-        self.builddir = self.tools.builddir()
-        self.installdir = self.tools.builddir("install")
+        self.builddir = self.tools.builddir(incremental=incremental)
+        self.installdir = self.tools.builddir("install", incremental=False)
+        self.prefix = "jolt-prefix"

-    def
+    def clean(self):
+        self.tools.rmtree(self.builddir, ignore_errors=True)
+        self.tools.rmtree(self.installdir, ignore_errors=True)
+
+    def configure(self, sourcedir, *args):
         sourcedir = self.tools.expand_path(sourcedir)
-        prefix = kwargs.get("prefix", "/")

         if not fs.path.exists(fs.path.join(sourcedir, "configure")):
             with self.tools.cwd(sourcedir):
                 self.tools.run("autoreconf -visf", output=True)

         with self.tools.cwd(self.builddir), self.tools.environ(DESTDIR=self.installdir):
-            self.tools.run("{0}/configure --prefix
-                           sourcedir,
-                           self.
+            self.tools.run("{0}/configure --prefix=/{1} {2} {3}",
+                           sourcedir,
+                           self.prefix,
+                           self.tools.getenv("CONFIGURE_FLAGS", ""),
+                           " ".join(args),
                            output=True)

     def build(self, *args, **kwargs):

@@ -327,13 +361,14 @@ class _AutoTools(object):
             self.tools.run("make VERBOSE=yes Q= V=1 -j{0}",
                            self.tools.cpu_count(), output=True)

-    def install(self, target="install"
-        with self.tools.cwd(self.builddir)
-            self.tools.run("make {}", target, output=True)
+    def install(self, target="install"):
+        with self.tools.cwd(self.builddir):
+            self.tools.run("make DESTDIR={} {}", self.installdir, target, output=True)

-    def publish(self, artifact, files='*', *args, **kwargs):
-        with self.tools.cwd(self.installdir):
-            artifact.collect(files, *args, **kwargs)
+    def publish(self, artifact, files='*', symlinks=True, *args, **kwargs):
+        with self.tools.cwd(self.installdir, self.prefix):
+            artifact.collect(files, *args, symlinks=symlinks, **kwargs)
+        artifact.strings.install_prefix = "/" + self.prefix


 class ZipFile(zipfile.ZipFile):
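Note: the _CMake, _Meson and _AutoTools helpers now accept incremental=, gained a clean() method, install into a DESTDIR staging area under a jolt-prefix directory, and record the prefix in artifact.strings.install_prefix when publishing. A sketch of the autotools flow from a task; the class name and source path are hypothetical, and {joltdir} is assumed to expand to the directory of the recipe:

    from jolt import Task

    class MyLib(Task):
        def run(self, deps, tools):
            ac = tools.autotools(deps, incremental=True)
            ac.configure("{joltdir}/mylib")   # autoreconf (if needed) + configure --prefix=/jolt-prefix
            ac.build()
            ac.install()                      # make DESTDIR=<installdir> install

        def publish(self, artifact, tools):
            # Collects the staged files and sets artifact.strings.install_prefix
            tools.autotools(None).publish(artifact)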
@@ -461,8 +496,9 @@ class Tools(object):
     def __init__(self, task=None, cwd=None, env=None):
         self._chroot = None
         self._chroot_prefix = []
+        self._chroot_path = []
+        self._deadline = None
         self._run_prefix = []
-        self._preexec_fn = None
         self._cwd = fs.path.normpath(fs.path.join(config.get_workdir(), cwd or config.get_workdir()))
         self._env = copy.deepcopy(env or os.environ)
         self._task = task

@@ -511,24 +547,50 @@ class Tools(object):
             zf.write(path, zippath)
         return filename

+    def _make_7zfile(self, filename, fmt, rootdir):
+        self.mkdirname(filename)
+        with py7zr.SevenZipFile(filename, 'w') as archive:
+            archive.writeall(rootdir, ".")
+        return filename
+
     def _make_tarfile(self, filename, fmt, rootdir):
-
-        if not os.path.exists(dirname):
-            fs.makedirs(dirname)
+        self.mkdirname(filename)
         with tarfile.open(filename, 'w|%s' % fmt) as tar:
             tar.add(rootdir, ".")
         return filename

+    def _make_tarzstd(self, filename, rootdir):
+        self.mkdirname(filename)
+        with open(filename, 'wb') as zstd_file:
+            compressor = zstandard.ZstdCompressor(threads=self.thread_count())
+            with compressor.stream_writer(zstd_file) as stream:
+                with tarfile.open(mode="w|", fileobj=stream) as tar:
+                    tar.add(rootdir, ".")
+        return filename
+
+    def _extract_tarzstd(self, filename, pathname, files=None):
+        with open(filename, 'rb') as zstd_file:
+            decompressor = zstandard.ZstdDecompressor()
+            with decompressor.stream_reader(zstd_file) as stream:
+                with tarfile.open(mode="r|", fileobj=stream) as tar:
+                    if files:
+                        for file in files:
+                            tar.extract(file, pathname)
+                    else:
+                        tar.extractall(pathname)
+
     def archive(self, pathname, filename):
         """ Creates a (compressed) archive.

         The type of archive to create is determined by the filename extension.
         Supported formats are:

+        - 7z
         - tar
         - tar.bz2
         - tar.gz
         - tar.xz
+        - tar.zst
         - zip

         Args:

@@ -548,6 +610,8 @@ class Tools(object):
                 self.run("tar -I pigz -cf {} -C {} .", filename, pathname)
                 return filename
             fmt = "targz"
+        elif filename.endswith(".tar.zst"):
+            return self._make_tarzstd(filename, rootdir=pathname)
         elif filename.endswith(".tgz"):
             if self.which("tar") and self.which("pigz"):
                 self.run("tar -I pigz -cf {} -C {} .", filename, pathname)

@@ -557,12 +621,16 @@ class Tools(object):
             fmt = "tarbz2"
         elif filename.endswith(".tar.xz"):
             fmt = "tarxz"
+        elif filename.endswith(".7z"):
+            fmt = "7z"
         raise_task_error_if(
             not fmt, self._task,
             "unknown archive type '{0}'", fs.path.basename(filename))
         try:
             if fmt == "zip":
                 outfile = self._make_zipfile(filename, fmt, rootdir=pathname)
+            elif fmt == "7z":
+                outfile = self._make_7zfile(filename, fmt, rootdir=pathname)
             else:
                 outfile = self._make_tarfile(filename, fmt[3:], rootdir=pathname)
             if outfile != filename:
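Note: archive() now also produces .tar.zst archives (multi-threaded zstandard compression) and .7z archives via py7zr. A short sketch; directory and file names are illustrative:

    def run(self, deps, tools):
        stage = tools.builddir("stage")
        dist = tools.builddir("dist")
        # ... populate 'stage' ...
        tools.archive(stage, dist + "/package.tar.zst")  # zstandard-compressed tarball
        tools.archive(stage, dist + "/package.7z")       # 7-Zip archive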
@@ -571,9 +639,9 @@ class Tools(object):
         except Exception:
             raise_task_error(self._task, "failed to create archive from directory '{0}'", pathname)

-    def autotools(self, deps=None):
+    def autotools(self, deps=None, incremental=False):
         """ Creates an AutoTools invokation helper """
-        return _AutoTools(deps, self)
+        return _AutoTools(deps, self, incremental=incremental)

     @utils.locked(lock='_builddir_lock')
     def builddir(self, name=None, incremental=False, unique=True):

@@ -600,8 +668,16 @@ class Tools(object):
             name += "-" + utils.canonical(self._task.short_qualified_name)

         dirname = fs.path.join(self.getcwd(), name)
+        if incremental:
+            dirname += "-inc"
+
+        # Check if incremental build directories are disabled in the configuration
+        if incremental not in ["always"] and not config.is_incremental_build():
+            incremental = False

         if incremental:
+            # Create a unique build directory for each task
+            # and store the task name in a hidden file.
             if self._task is not None and unique:
                 meta_task = fs.path.join(dirname, ".task")
                 if not fs.path.exists(meta_task) \

@@ -609,6 +685,7 @@ class Tools(object):
                     fs.rmtree(dirname, ignore_errors=True)
                     fs.makedirs(dirname)

+            # Remove the build directory if the task taint has changed (--force or --salt)
             if self._task.taint is not None:
                 meta = fs.path.join(dirname, ".taint")
                 if not fs.path.exists(meta) or self.read_file(meta) != str(self._task.taint):

@@ -622,21 +699,16 @@ class Tools(object):
             return dirname

         if name not in self._builddir:
-
-
-            self._builddir[name] = fs.mkdtemp(
-                prefix=fs.path.basename(dirname) + "-",
-                dir=fs.path.dirname(dirname))
-        else:
-            fs.makedirs(dirname)
-            self._builddir[name] = dirname
+            fs.makedirs(dirname)
+            self._builddir[name] = dirname

         return self._builddir[name]

     @property
     def buildroot(self):
         """ Return the root path of all build directories """
-
+        from jolt.loader import JoltLoader
+        return fs.path.normpath(JoltLoader.get().build_path)

     def checksum_file(self, filelist, concat=False, hashfn=hashlib.sha1, filterfn=None):
         """ Calculate a checksum of one or multiple files.

@@ -693,6 +765,7 @@ class Tools(object):
         - .bz2
         - .gz
         - .xz
+        - .zst

         Args:
             src (str): Source file to be compressed.

@@ -720,6 +793,13 @@ class Tools(object):
             with lzma.open(dst, 'wb') as outfp:
                 for block in iter(lambda: infp.read(0x10000), b''):
                     outfp.write(block)
+        elif ext == "zst":
+            with open(src, 'rb') as infp:
+                with open(dst, 'wb') as outfp:
+                    compressor = zstandard.ZstdCompressor(threads=self.thread_count())
+                    with compressor.stream_writer(outfp) as stream:
+                        for block in iter(lambda: infp.read(0x10000), b''):
+                            stream.write(block)

     def copy(self, src, dst, symlinks=False):
         """ Copies file and directories (recursively).

@@ -788,10 +868,19 @@ class Tools(object):
         finally:
             self._cwd = prev

-    def download(self, url, pathname, exceptions=True, **kwargs):
+    def download(self, url, pathname, exceptions=True, auth=None, **kwargs):
         """
         Downloads a file using HTTP.

+        Automatically expands any {keyword} arguments in the URL and pathname.
+
+        Basic authentication is supported by including the credentials in the URL.
+        Environment variables can be used to hide sensitive information. Specify
+        the environment variable name in the URI as e.g.
+        ``http://{environ[USER]}:{environ[PASS]}@host``.
+        Alternatively, the auth parameter can be used to provide an authentication
+        object that is passed to the requests.get() function.
+
         Throws a JoltError exception on failure.

         Args:

@@ -804,16 +893,32 @@ class Tools(object):

         url = self.expand(url)
         pathname = self.expand_path(pathname)
+
+        url_parsed = urlparse(url)
+        raise_task_error_if(
+            not url_parsed.scheme or not url_parsed.netloc,
+            self._task,
+            "Invalid URL: '{}'", url)
+
+        if auth is None and url_parsed.username and url_parsed.password:
+            auth = HTTPBasicAuth(url_parsed.username, url_parsed.password)
+
+        # Redact password from URL if present
+        if url_parsed.password:
+            url_parsed = url_parsed._replace(netloc=url_parsed.netloc.replace(url_parsed.password, "****"))
+
+        url_cleaned = urlunparse(url_parsed)
+
         try:
-            response = http_session.get(url, stream=True, **kwargs)
+            response = http_session.get(url, stream=True, auth=auth, **kwargs)
             raise_error_if(
                 exceptions and response.status_code not in [200],
-                f"Download from '{
+                f"Download from '{url_cleaned}' failed with status '{response.status_code}'")

             name = fs.path.basename(pathname)
             size = int(response.headers.get('content-length', 0))
             with log.progress("Downloading {0}".format(utils.shorten(name)), size, "B") as pbar:
-                log.verbose("{} -> {}",
+                log.verbose("{} -> {}", url_cleaned, pathname)
                 with open(pathname, 'wb') as out_file:
                     chunk_size = 4096
                     for data in response.iter_content(chunk_size=chunk_size):

@@ -821,7 +926,7 @@ class Tools(object):
                         pbar.update(len(data))
             actual_size = self.file_size(pathname)
             raise_error_if(
-                size != 0 and size
+                size != 0 and size > actual_size,
                 f"Downloaded file was truncated to {actual_size}/{size} bytes: {name}")

             return response.status_code == 200
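Note: download() gains an auth parameter, turns credentials embedded in the URL into HTTP basic auth, validates the URL, and redacts the password before logging; upload() further down receives the same treatment. A hedged sketch, with the host and variable names purely illustrative:

    def run(self, deps, tools):
        # Credentials come from environment variables and are masked in log output.
        # A failed download raises an error because exceptions=True by default.
        tools.download(
            "https://{environ[DL_USER]}:{environ[DL_PASS]}@files.example.com/toolchain.tar.gz",
            "toolchain.tar.gz")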
@@ -850,16 +955,29 @@ class Tools(object):
                 with tools.environ(CC="clang"):
                     tools.run("make all")
         """
-        for key, value in kwargs.items():
-            kwargs[key] = self.expand(value)
-
         restore = {key: value for key, value in self._env.items()}
-
-        yield self._env
+
         for key, value in kwargs.items():
-            if
-
-
+            if value is not None:
+                self._env[key] = self.expand(value)
+            else:
+                self._env.pop(key, None)
+
+        try:
+            yield self._env
+        finally:
+            self._env = restore
+
+    def exists(self, pathname):
+        """ Checks if a file or directory exists.
+
+        Args:
+            pathname (str): Path to file or directory.
+
+        Returns:
+            bool: True if the file or directory exists, False otherwise.
+        """
+        return fs.path.exists(self.expand_path(pathname))

     def expand(self, string, *args, **kwargs):
         """ Expands keyword arguments/macros in a format string.
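Note: environ() now restores the previous environment even when the block raises, and passing None for a variable removes it for the duration of the block; the new exists() helper expands macros before checking the path. Sketch (paths are illustrative):

    def run(self, deps, tools):
        # CC is overridden and LD_PRELOAD is unset inside the block; both are restored afterwards.
        with tools.environ(CC="clang", LD_PRELOAD=None):
            tools.run("make all")
        assert tools.exists("build/output.bin")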
@@ -967,10 +1085,12 @@ class Tools(object):

         Supported formats are:

+        - 7z
         - tar
         - tar.bz2
         - tar.gz
         - tar.xz
+        - tar.zst
         - zip

         Args:

@@ -1024,6 +1144,18 @@ class Tools(object):
                         tar.extract(file, filepath)
                 else:
                     tar.extractall(filepath)
+            elif filename.endswith(".tar.zst"):
+                try:
+                    self._extract_tarzstd(filename, filepath, files)
+                except tarfile.StreamError as e:
+                    raise_task_error(self._task, "failed to extract archive '{0}': {1}", filename, str(e))
+            elif filename.endswith(".7z"):
+                with py7zr.SevenZipFile(filename, 'r') as archive:
+                    if files:
+                        for file in files:
+                            archive.extract(file, filepath)
+                    else:
+                        archive.extractall(filepath)
             else:
                 raise_task_error(self._task, "unknown archive type '{0}'", fs.path.basename(filename))
         except Exception:

@@ -1053,7 +1185,7 @@ class Tools(object):
         """ Returns the current working directory. """
         return fs.path.normpath(self._cwd)

-    def getenv(self, key, default=
+    def getenv(self, key, default=None):
         """ Returns the value of an environment variable.

         Only child processes spawned by the same tools object can see

@@ -1090,7 +1222,19 @@ class Tools(object):
             files = [self.expand_path(file) for file in files]
         elif not fs.path.isabs(pathname):
             files = [self.expand_relpath(file, self.getcwd()) for file in files]
-        return files
+        return list(sorted(files))
+
+    def isdir(self, pathname):
+        """ Determines if a path is a directory.
+
+        Args:
+            pathname (str): Path to a file or directory.
+
+        Returns:
+            boolean: True if the path is a directory, False otherwise.
+        """
+        pathname = self.expand_path(pathname)
+        return fs.path.isdir(pathname)

     def mkdir(self, pathname, recursively=True):
         """ Create directory. """

@@ -1163,9 +1307,80 @@ class Tools(object):
         """
         return utils.map_concurrent(callable, iterable, max_workers)

-    def meson(self, deps=None):
+    def meson(self, deps=None, incremental=False):
         """ Creates a Meson invokation helper """
-        return _Meson(deps, self)
+        return _Meson(deps, self, incremental=incremental)
+
+    @contextmanager
+    def nixpkgs(self, nixfile=None, packages=None, pure=False, path=None, options=None):
+        """
+        Creates a Nix environment with the specified packages.
+
+        Args:
+            nixfile (str): Path to a Nix expression file.
+            packages (list): List of Nix packages to include in environment.
+            pure (boolean): Create a pure environment.
+            path (list): List of Nix expression paths.
+            options (dict): Nix configuration options.
+
+        Example:
+
+        .. code-block:: python
+
+            def run(self, deps, tools):
+                with tools.nixpkgs(packages=["gcc13"]):
+                    tools.run("gcc --version")
+
+        """
+
+        # Check if Nix is available
+        raise_task_error_if(
+            not self.which("nix-shell"),
+            self._task,
+            "Nix not available on this system")
+
+        nixfile = self.expand_path(nixfile) if nixfile else ""
+        pathflags = " ".join([f"-I {path}" for path in path or []])
+        options = " ".join([f"--option {k} {v}" for k, v in (options or {}).items()])
+        pureflag = "--pure" if pure else ""
+        packages = "-p " + " ".join(packages) if packages else ""
+
+        # Expand all placeholders
+        options = self.expand(options)
+        packages = self.expand(packages)
+        pathflags = self.expand(pathflags)
+
+        # Use cached-nix-shell is available
+        nixshell = "cached-nix-shell" if self.which("cached-nix-shell") else "nix-shell"
+
+        # Run nix-shell to stage packages and environment
+        env = self.run(
+            "{} {} {} {} --run 'env -0' {}",
+            nixshell,
+            pathflags,
+            pureflag,
+            packages,
+            nixfile,
+            output_on_error=True)
+        env = env.strip().strip("\x00")
+        env = dict(line.split("=", 1) for line in env.split('\x00'))
+
+        # Add host path first to environment PATH
+        host_path = env.get("HOST_PATH", None)
+        if host_path:
+            env["PATH"] = host_path + os.pathsep + env["PATH"]
+
+        # Enter the environment
+        old_env = self._env
+        try:
+            if pure:
+                self._env = env
+            else:
+                self._env = copy.deepcopy(env)
+                self._env.update(env)
+            yield
+        finally:
+            self._env = old_env

     def render(self, template, **kwargs):
         """ Render a Jinja template string.
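Note: the new nixpkgs() context manager stages a Nix shell environment (preferring cached-nix-shell when installed) and exports its variables to subsequent commands. Beyond the docstring example, a pure environment built from an expression file might look like this; the file name is illustrative and is resolved relative to the current working directory:

    def run(self, deps, tools):
        # Commands below only see what shell.nix provides (pure=True).
        with tools.nixpkgs(nixfile="shell.nix", pure=True):
            tools.run("cmake --version")
            tools.run("ninja --version")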
@@ -1370,6 +1585,9 @@ class Tools(object):
             refuses to terminate, it will be killed after an additional
             10 seconds have passed. Default: None.

+        Returns:
+            str: stdout from command unless output=False
+
         Example:

         .. code-block:: python

@@ -1390,6 +1608,11 @@ class Tools(object):
         else:
             cmd = " ".join(self._chroot_prefix + self._run_prefix) + " " + cmd

+        if self._deadline is not None:
+            remaining = int(self._deadline - time.time() + 0.5)
+            timeout = kwargs.get("timeout", remaining)
+            kwargs["timeout"] = min(remaining, timeout)
+
         cmd = self.expand(cmd, *args, **kwargs)

         stdi, stdo, stde = None, None, None

@@ -1404,7 +1627,7 @@ class Tools(object):
            except Exception:
                pass

-            return _run(cmd, self._cwd, self._env,
+            return _run(cmd, self._cwd, self._env, *args, **kwargs)

         finally:
            if stdi:

@@ -1484,7 +1707,7 @@ class Tools(object):
         """

         raise_error_if(
-            type(artifact)
+            type(artifact) not in [cache.Artifact, cache.ArtifactToolsProxy],
             "non-artifact passed as argument to Tools.sandbox()")

         suffix = utils.canonical(artifact.task.short_qualified_name)

@@ -1518,11 +1741,15 @@ class Tools(object):
             fs.makedirs(path)
             for relsrcpath, reldstpath in artifact.files.items():
                 srcpath = fs.path.normpath(fs.path.join(artifact.task.joltdir, relsrcpath))
+                srcpath = self.expand_path(srcpath)
                 dstpath = fs.path.normpath(fs.path.join(path, reldstpath))
+                dstpath = self.expand_path(dstpath)
+
                 if dstpath != fs.path.realpath(dstpath):
                     log.debug("Cannot symlink '{} -> {}', parent directory already symlinked",
                               srcpath, dstpath)
                     continue
+
                 if fs.path.isdir(dstpath):
                     files = fs.scandir(srcpath)
                     for file in files:

@@ -1532,7 +1759,8 @@ class Tools(object):
                     self.symlink(srcpath, dstpath)

                 # Restore missing srcfiles if they resided in a build directory
-
+                buildroot_abs = self.expand_path(artifact.tools.buildroot)
+                if srcpath.startswith(buildroot_abs) and \
                         not fs.path.exists(srcpath):
                     fs.copy(fs.path.join(artifact.path, reldstpath), srcpath, symlinks=True)
             self.write_file(meta, artifact.path)

@@ -1579,7 +1807,41 @@ class Tools(object):
             fs.makedirs(dstdir)
         fs.symlink(src, dst)

-
+    @contextmanager
+    def timeout(self, seconds):
+        """ Context manager to set a timeout for a block of code.
+
+        A TimeoutError exception is raised if the block of code does not
+        complete within the specified time.
+
+        Args:
+            seconds (int): Timeout in seconds.
+
+        Example:
+
+        .. code-block:: python
+
+            with tools.timeout(5):
+                tools.run("sleep 10")
+
+        """
+        if seconds is None:
+            yield
+            return
+
+        with utils.timeout(seconds, JoltTimeoutError):
+            old_deadline = self._deadline
+            try:
+                if old_deadline is None:
+                    self._deadline = time.time() + seconds
+                else:
+                    self._deadline = min(old_deadline, time.time() + seconds)
+                yield
+            finally:
+                self._deadline = old_deadline
+
+    @contextmanager
+    def tmpdir(self, name=None):
         """ Creates a temporary directory.

         The directory is only valid within a context and it is removed
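Note: together, the new timeout() context manager and the deadline handling added to run() (hunk at +1608 above) mean that every command inside a timeout() block inherits the remaining time as its command timeout; run() also now documents that it returns the command's stdout. Sketch, with illustrative commands:

    def run(self, deps, tools):
        # The whole sequence must finish within 15 minutes; each command
        # inherits the remaining time as its timeout.
        with tools.timeout(900):
            tools.run("./configure")
            tools.run("make -j{}", tools.cpu_count())
        # run() returns the command's stdout
        describe = tools.run("git describe --tags")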
@@ -1594,11 +1856,18 @@ class Tools(object):

         .. code-block:: python

-            with tools.tmpdir(
+            with tools.tmpdir() as tmp, tools.cwd(tmp):
                 tools.write_file("tempfile", "tempdata")

         """
-
+        dirname = None
+        try:
+            self.mkdir(self.buildroot)
+            dirname = fs.mkdtemp(prefix=(name or "tmpdir") + "-", dir=self.buildroot)
+            yield fs.path.normpath(dirname)
+        finally:
+            if dirname:
+                self.rmtree(dirname, ignore_errors=True)

     def unlink(self, pathname, *args, **kwargs):
         """Removes a file from disk.

@@ -1613,7 +1882,8 @@ class Tools(object):
         return fs.unlink(pathname, ignore_errors=kwargs.get("ignore_errors", False))

     @contextmanager
-
+    @utils.deprecated
+    def chroot(self, chroot, *args, path=None, **kwargs):
         """
         Experimental: Use chroot as root filesystem when running commands.

@@ -1625,6 +1895,9 @@ class Tools(object):
         Args:
             chroot (str, artifact): Path to rootfs directory, or an artifact
                 with a 'rootfs' metadata path (artifact.paths.rootfs).
+            path (list): List of directory paths within the chroot to add to
+                the PATH environment variable, e.g. ["/usr/bin", "/bin"].
+                By default, the current PATH is used also within the chroot.

         Example:

@@ -1637,16 +1910,16 @@ class Tools(object):
         raise_error_if(platform.system() != "Linux", "Tools.chroot() is only supported on Linux")

         raise_task_error_if(
-            not self.which("newuidmap"), self._task,
+            not self.which("newuidmap") and not self.which("/usr/bin/newuidmap"), self._task,
             "No usable 'newuidmap' found in PATH")

         raise_task_error_if(
-            not self.which("newgidmap"), self._task,
+            not self.which("newgidmap") and not self.which("/usr/bin/newuidmap"), self._task,
             "No usable 'newgidmap' found in PATH")

-        if type(chroot)
+        if type(chroot) in [cache.Artifact, cache.ArtifactToolsProxy]:
             raise_task_error_if(
-                not
+                not chroot.paths.rootfs, self._task,
                 "No 'rootfs' path in artifact")
             chroot = chroot.paths.rootfs

@@ -1703,22 +1976,34 @@ class Tools(object):
         unshare = os.path.join(os.path.dirname(__file__), "chroot.py")

         old_chroot = self._chroot
+        old_chroot_path = self._chroot_path
         old_chroot_prefix = self._chroot_prefix
         self._chroot = chroot
-
-
-
-
-
-
-
-            "--shell={shell}",
-            "--",
-        ]
+
+        if path:
+            self._chroot_path = path
+        else:
+            self._chroot_path = self._env.get("PATH")
+            self._chroot_path = self._chroot_path.split(fs.pathsep) if self._chroot_path else []
+
         try:
-
+            with self.tmpdir("chroot") as bindroot:
+                self._chroot_prefix = [
+                    sys.executable,
+                    unshare,
+                    "-b",
+                ] + bind + [
+                    "-c",
+                    chroot,
+                    "-t",
+                    bindroot,
+                    "--shell={shell}",
+                    "--",
+                ]
+                yield
         finally:
             self._chroot = old_chroot
+            self._chroot_path = old_chroot_path
             self._chroot_prefix = old_chroot_prefix

     def _unshare(self, uidmap, gidmap):
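Note: chroot() is now flagged with @utils.deprecated and takes a path argument listing directories inside the root filesystem to prepend to PATH; which() (further down) resolves executables through those directories as well. A hedged sketch, assuming a dependency task that publishes artifact.paths.rootfs; the task name is hypothetical:

    from jolt import Task

    class BuildInRootfs(Task):
        requires = ["rootfs"]   # hypothetical task exporting artifact.paths.rootfs

        def run(self, deps, tools):
            with tools.chroot(deps["rootfs"], path=["/usr/bin", "/bin"]):
                tools.run("gcc --version")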
@@ -1846,6 +2131,15 @@ class Tools(object):
         """
         Uploads a file using HTTP (PUT).

+        Automatically expands any {keyword} arguments in the URL and pathname.
+
+        Basic authentication is supported by including the credentials in the URL.
+        Environment variables can be used to hide sensitive information. Specify
+        the environment variable name in the URI as e.g.
+        ``http://{environ[USER]}:{environ[PASS]}@host``.
+        Alternatively, the auth parameter can be used to provide an authentication
+        object that is passed to the requests.get() function.
+
         Throws a JoltError exception on failure.

         Args:

@@ -1858,11 +2152,28 @@ class Tools(object):

         """
         pathname = self.expand_path(pathname)
+        url = self.expand(url)
         name = fs.path.basename(pathname)
         size = self.file_size(pathname)
+
+        url_parsed = urlparse(url)
+        raise_task_error_if(
+            not url_parsed.scheme or not url_parsed.netloc,
+            self._task,
+            "Invalid URL: '{}'", url)
+
+        if auth is None and url_parsed.username and url_parsed.password:
+            auth = HTTPBasicAuth(url_parsed.username, url_parsed.password)
+
+        # Redact password from URL if present
+        if url_parsed.password:
+            url_parsed = url_parsed._replace(netloc=url_parsed.netloc.replace(url_parsed.password, "****"))
+
+        url_cleaned = urlunparse(url_parsed)
+
         with log.progress("Uploading " + utils.shorten(name), size, "B") as pbar, \
                 open(pathname, 'rb') as fileobj:
-            log.verbose("{} -> {}", pathname,
+            log.verbose("{} -> {}", pathname, url_cleaned)

             def read():
                 data = fileobj.read(4096)

@@ -1872,7 +2183,7 @@ class Tools(object):
             response = http_session.put(url, data=iter(read, b''), auth=auth, **kwargs)
             raise_error_if(
                 exceptions and response.status_code not in [201, 204],
-                f"Upload to '{
+                f"Upload to '{url_cleaned}' failed with status '{response.status_code}'")
             return response.status_code in [201, 204]

     def read_file(self, pathname, binary=False):

@@ -1921,6 +2232,20 @@ class Tools(object):

         return deps

+    def read_json(self, pathname):
+        """
+        Reads a JSON file.
+
+        Args:
+            pathname (str): Name/path of file to be read.
+
+        Returns:
+            dict: Dictionary of JSON data.
+        """
+        pathname = self.expand_path(pathname)
+        with open(pathname) as f:
+            return json.load(f)
+
     def which(self, executable):
         """ Find executable in PATH.

@@ -1934,8 +2259,9 @@ class Tools(object):
         path = self._env.get("PATH")

         if self._chroot:
-            path = fs.pathsep
-
+            path = path.split(fs.pathsep) if path else []
+            path = [os.path.join(self._chroot, p.lstrip(fs.sep)) for p in self._chroot_path] + path
+            path = fs.pathsep.join(path)

         result = shutil.which(executable, path=path)
         if result and self._chroot and result.startswith(self._chroot):

@@ -1964,6 +2290,19 @@ class Tools(object):
         with open(pathname, "wb") as f:
             f.write(content.encode())

+    def write_json(self, pathname, data, indent=4, sort_keys=True):
+        """
+        Writes a JSON file.
+
+        Args:
+            pathname (str): Name/path of file to be written.
+            data (dict): Dictionary of JSON data.
+            indent (int): Indentation level for JSON output.
+        """
+        pathname = self.expand_path(pathname)
+        with open(pathname, "w") as f:
+            json.dump(data, f, indent=indent, sort_keys=sort_keys)
+
     @property
     def wsroot(self):
         """ Return the root path of all build directories """
|