jolt 0.9.123__py3-none-any.whl → 0.9.435__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196)
  1. jolt/__init__.py +80 -7
  2. jolt/__main__.py +9 -1
  3. jolt/bin/fstree-darwin-x86_64 +0 -0
  4. jolt/bin/fstree-linux-x86_64 +0 -0
  5. jolt/cache.py +832 -362
  6. jolt/chroot.py +156 -0
  7. jolt/cli.py +281 -162
  8. jolt/common_pb2.py +63 -0
  9. jolt/common_pb2_grpc.py +4 -0
  10. jolt/config.py +98 -41
  11. jolt/error.py +19 -4
  12. jolt/filesystem.py +2 -6
  13. jolt/graph.py +705 -117
  14. jolt/hooks.py +43 -0
  15. jolt/influence.py +122 -3
  16. jolt/loader.py +369 -121
  17. jolt/log.py +225 -63
  18. jolt/manifest.py +28 -38
  19. jolt/options.py +35 -10
  20. jolt/pkgs/abseil.py +42 -0
  21. jolt/pkgs/asio.py +25 -0
  22. jolt/pkgs/autoconf.py +41 -0
  23. jolt/pkgs/automake.py +41 -0
  24. jolt/pkgs/b2.py +31 -0
  25. jolt/pkgs/boost.py +111 -0
  26. jolt/pkgs/boringssl.py +32 -0
  27. jolt/pkgs/busybox.py +39 -0
  28. jolt/pkgs/bzip2.py +43 -0
  29. jolt/pkgs/cares.py +29 -0
  30. jolt/pkgs/catch2.py +36 -0
  31. jolt/pkgs/cbindgen.py +17 -0
  32. jolt/pkgs/cista.py +19 -0
  33. jolt/pkgs/clang.py +44 -0
  34. jolt/pkgs/cli11.py +24 -0
  35. jolt/pkgs/cmake.py +48 -0
  36. jolt/pkgs/cpython.py +196 -0
  37. jolt/pkgs/crun.py +29 -0
  38. jolt/pkgs/curl.py +38 -0
  39. jolt/pkgs/dbus.py +18 -0
  40. jolt/pkgs/double_conversion.py +24 -0
  41. jolt/pkgs/fastfloat.py +21 -0
  42. jolt/pkgs/ffmpeg.py +28 -0
  43. jolt/pkgs/flatbuffers.py +29 -0
  44. jolt/pkgs/fmt.py +27 -0
  45. jolt/pkgs/fstree.py +20 -0
  46. jolt/pkgs/gflags.py +18 -0
  47. jolt/pkgs/glib.py +18 -0
  48. jolt/pkgs/glog.py +25 -0
  49. jolt/pkgs/glslang.py +21 -0
  50. jolt/pkgs/golang.py +16 -11
  51. jolt/pkgs/googlebenchmark.py +18 -0
  52. jolt/pkgs/googletest.py +46 -0
  53. jolt/pkgs/gperf.py +15 -0
  54. jolt/pkgs/grpc.py +73 -0
  55. jolt/pkgs/hdf5.py +19 -0
  56. jolt/pkgs/help2man.py +14 -0
  57. jolt/pkgs/inja.py +28 -0
  58. jolt/pkgs/jsoncpp.py +31 -0
  59. jolt/pkgs/libarchive.py +43 -0
  60. jolt/pkgs/libcap.py +44 -0
  61. jolt/pkgs/libdrm.py +44 -0
  62. jolt/pkgs/libedit.py +42 -0
  63. jolt/pkgs/libevent.py +31 -0
  64. jolt/pkgs/libexpat.py +27 -0
  65. jolt/pkgs/libfastjson.py +21 -0
  66. jolt/pkgs/libffi.py +16 -0
  67. jolt/pkgs/libglvnd.py +30 -0
  68. jolt/pkgs/libogg.py +28 -0
  69. jolt/pkgs/libpciaccess.py +18 -0
  70. jolt/pkgs/libseccomp.py +21 -0
  71. jolt/pkgs/libtirpc.py +24 -0
  72. jolt/pkgs/libtool.py +42 -0
  73. jolt/pkgs/libunwind.py +35 -0
  74. jolt/pkgs/libva.py +18 -0
  75. jolt/pkgs/libvorbis.py +33 -0
  76. jolt/pkgs/libxml2.py +35 -0
  77. jolt/pkgs/libxslt.py +17 -0
  78. jolt/pkgs/libyajl.py +16 -0
  79. jolt/pkgs/llvm.py +81 -0
  80. jolt/pkgs/lua.py +54 -0
  81. jolt/pkgs/lz4.py +26 -0
  82. jolt/pkgs/m4.py +14 -0
  83. jolt/pkgs/make.py +17 -0
  84. jolt/pkgs/mesa.py +81 -0
  85. jolt/pkgs/meson.py +17 -0
  86. jolt/pkgs/mstch.py +28 -0
  87. jolt/pkgs/mysql.py +60 -0
  88. jolt/pkgs/nasm.py +49 -0
  89. jolt/pkgs/ncurses.py +30 -0
  90. jolt/pkgs/ng_log.py +25 -0
  91. jolt/pkgs/ninja.py +45 -0
  92. jolt/pkgs/nlohmann_json.py +25 -0
  93. jolt/pkgs/nodejs.py +19 -11
  94. jolt/pkgs/opencv.py +24 -0
  95. jolt/pkgs/openjdk.py +26 -0
  96. jolt/pkgs/openssl.py +103 -0
  97. jolt/pkgs/paho.py +76 -0
  98. jolt/pkgs/patchelf.py +16 -0
  99. jolt/pkgs/perl.py +42 -0
  100. jolt/pkgs/pkgconfig.py +64 -0
  101. jolt/pkgs/poco.py +39 -0
  102. jolt/pkgs/protobuf.py +77 -0
  103. jolt/pkgs/pugixml.py +27 -0
  104. jolt/pkgs/python.py +19 -0
  105. jolt/pkgs/qt.py +35 -0
  106. jolt/pkgs/rapidjson.py +26 -0
  107. jolt/pkgs/rapidyaml.py +28 -0
  108. jolt/pkgs/re2.py +30 -0
  109. jolt/pkgs/re2c.py +17 -0
  110. jolt/pkgs/readline.py +15 -0
  111. jolt/pkgs/rust.py +41 -0
  112. jolt/pkgs/sdl.py +28 -0
  113. jolt/pkgs/simdjson.py +27 -0
  114. jolt/pkgs/soci.py +46 -0
  115. jolt/pkgs/spdlog.py +29 -0
  116. jolt/pkgs/spirv_llvm.py +21 -0
  117. jolt/pkgs/spirv_tools.py +24 -0
  118. jolt/pkgs/sqlite.py +83 -0
  119. jolt/pkgs/ssl.py +12 -0
  120. jolt/pkgs/texinfo.py +15 -0
  121. jolt/pkgs/tomlplusplus.py +22 -0
  122. jolt/pkgs/wayland.py +26 -0
  123. jolt/pkgs/x11.py +58 -0
  124. jolt/pkgs/xerces_c.py +20 -0
  125. jolt/pkgs/xorg.py +360 -0
  126. jolt/pkgs/xz.py +29 -0
  127. jolt/pkgs/yamlcpp.py +30 -0
  128. jolt/pkgs/zeromq.py +47 -0
  129. jolt/pkgs/zlib.py +87 -0
  130. jolt/pkgs/zstd.py +33 -0
  131. jolt/plugins/alias.py +3 -0
  132. jolt/plugins/allure.py +5 -2
  133. jolt/plugins/autotools.py +66 -0
  134. jolt/plugins/cache.py +133 -0
  135. jolt/plugins/cmake.py +74 -6
  136. jolt/plugins/conan.py +238 -0
  137. jolt/plugins/cxx.py +698 -0
  138. jolt/plugins/cxxinfo.py +7 -0
  139. jolt/plugins/dashboard.py +1 -1
  140. jolt/plugins/docker.py +80 -23
  141. jolt/plugins/email.py +2 -2
  142. jolt/plugins/email.xslt +144 -101
  143. jolt/plugins/environ.py +11 -0
  144. jolt/plugins/fetch.py +141 -0
  145. jolt/plugins/gdb.py +39 -19
  146. jolt/plugins/gerrit.py +1 -14
  147. jolt/plugins/git.py +283 -85
  148. jolt/plugins/googletest.py +2 -1
  149. jolt/plugins/http.py +36 -38
  150. jolt/plugins/libtool.py +63 -0
  151. jolt/plugins/linux.py +990 -0
  152. jolt/plugins/logstash.py +4 -4
  153. jolt/plugins/meson.py +61 -0
  154. jolt/plugins/ninja-compdb.py +99 -30
  155. jolt/plugins/ninja.py +468 -166
  156. jolt/plugins/paths.py +11 -1
  157. jolt/plugins/pkgconfig.py +219 -0
  158. jolt/plugins/podman.py +136 -92
  159. jolt/plugins/python.py +137 -0
  160. jolt/plugins/remote_execution/__init__.py +0 -0
  161. jolt/plugins/remote_execution/administration_pb2.py +46 -0
  162. jolt/plugins/remote_execution/administration_pb2_grpc.py +170 -0
  163. jolt/plugins/remote_execution/log_pb2.py +32 -0
  164. jolt/plugins/remote_execution/log_pb2_grpc.py +68 -0
  165. jolt/plugins/remote_execution/scheduler_pb2.py +41 -0
  166. jolt/plugins/remote_execution/scheduler_pb2_grpc.py +141 -0
  167. jolt/plugins/remote_execution/worker_pb2.py +38 -0
  168. jolt/plugins/remote_execution/worker_pb2_grpc.py +112 -0
  169. jolt/plugins/report.py +12 -2
  170. jolt/plugins/rust.py +25 -0
  171. jolt/plugins/scheduler.py +710 -0
  172. jolt/plugins/selfdeploy/setup.py +8 -4
  173. jolt/plugins/selfdeploy.py +138 -88
  174. jolt/plugins/strings.py +35 -22
  175. jolt/plugins/symlinks.py +26 -11
  176. jolt/plugins/telemetry.py +5 -2
  177. jolt/plugins/timeline.py +13 -3
  178. jolt/plugins/volume.py +46 -48
  179. jolt/scheduler.py +589 -192
  180. jolt/tasks.py +625 -121
  181. jolt/templates/timeline.html.template +44 -47
  182. jolt/timer.py +22 -0
  183. jolt/tools.py +638 -282
  184. jolt/utils.py +211 -7
  185. jolt/version.py +1 -1
  186. jolt/xmldom.py +12 -2
  187. {jolt-0.9.123.dist-info → jolt-0.9.435.dist-info}/METADATA +97 -38
  188. jolt-0.9.435.dist-info/RECORD +207 -0
  189. {jolt-0.9.123.dist-info → jolt-0.9.435.dist-info}/WHEEL +1 -1
  190. jolt/plugins/amqp.py +0 -834
  191. jolt/plugins/debian.py +0 -338
  192. jolt/plugins/ftp.py +0 -181
  193. jolt/plugins/repo.py +0 -253
  194. jolt-0.9.123.dist-info/RECORD +0 -77
  195. {jolt-0.9.123.dist-info → jolt-0.9.435.dist-info}/entry_points.txt +0 -0
  196. {jolt-0.9.123.dist-info → jolt-0.9.435.dist-info}/top_level.txt +0 -0
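Most of the churn in jolt/tools.py below adds new Tools helpers: zstandard and 7z archive support, a command timeout() context manager, JSON read/write helpers, exists()/isdir() checks and a reworked tmpdir(). As orientation before reading the diff, here is a minimal usage sketch. It is not code from the package; the task name, file names and values are invented for illustration, and only the tools methods themselves appear in the diff below.

    from jolt import Task


    class NewToolsDemo(Task):
        """Hypothetical task exercising a few of the new Tools helpers."""

        def run(self, deps, tools):
            # tmpdir() now yields the directory path directly (see the updated
            # docstring in the diff); the directory is removed on exit.
            with tools.tmpdir() as tmp, tools.cwd(tmp):
                # New JSON convenience wrappers around json.dump()/json.load().
                tools.write_json("metadata.json", {"version": "0.9.435"})
                data = tools.read_json("metadata.json")
                assert data["version"] == "0.9.435"

                # New path checks that expand {keyword} macros before testing.
                assert tools.exists("metadata.json")
                assert not tools.isdir("metadata.json")

            # timeout() bounds the wall-clock time of the enclosed commands and
            # raises a timeout error if they do not finish in time.
            with tools.timeout(300):
                tools.run("echo hello")

            # archive()/extract() now also accept .tar.zst and .7z extensions.
            tools.archive(tools.builddir(), "demo.tar.zst")
            tools.extract("demo.tar.zst", tools.builddir("unpacked"))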
jolt/tools.py CHANGED
@@ -1,13 +1,16 @@
+ import py7zr
  import bz2
  import copy
  import getpass
  import gzip
+ import json
  import lzma
  import subprocess
  import os
  import platform
  import sys
  import threading
+ import time
  if os.name != "nt":
  import termios
  import glob
@@ -18,12 +21,16 @@ import tarfile
  import zipfile
  import bz2file
  import hashlib
+ import zstandard
  from contextlib import contextmanager
  from psutil import NoSuchProcess, Process
-
- from jinja2 import Environment, FileSystemLoader, select_autoescape
+ from jinja2 import Environment, FileSystemLoader
  from jinja2.exceptions import TemplateError
  from jinja2.runtime import Context
+ from jinja2.utils import missing
+ from requests import Session
+ from requests.auth import HTTPBasicAuth
+ from urllib.parse import urlparse, urlunparse


  from jolt import cache
@@ -31,11 +38,17 @@ from jolt import filesystem as fs
  from jolt import log
  from jolt import utils
  from jolt import config
- from jolt.error import JoltCommandError
+ from jolt.error import JoltCommandError, JoltTimeoutError
  from jolt.error import raise_error_if
  from jolt.error import raise_task_error, raise_task_error_if


+ SUPPORTED_ARCHIVE_TYPES = [".tar", ".tar.bz2", ".tar.gz", ".tgz", ".tar.xz", ".tar.zst", ".zip"]
+
+
+ http_session = Session()
+
+
  def stdout_write(line):
  sys.stdout.write(line + "\n")
  sys.stdout.flush()
@@ -46,65 +59,54 @@ def stderr_write(line):
  sys.stderr.flush()


- def _run(cmd, cwd, env, preexec_fn, *args, **kwargs):
+ class Reader(threading.Thread):
+ def __init__(self, parent, stream, output=None, logbuf=None, output_rstrip=True):
+ super(Reader, self).__init__()
+ self.output = output
+ self.output_rstrip = output_rstrip
+ self.parent = parent
+ self.stream = stream
+ self.logbuf = logbuf if logbuf is not None else []
+ self.start()
+
+ def run(self):
+ line = ""
+ try:
+ with log.map_thread(self, self.parent):
+ for line in iter(self.stream.readline, b''):
+ if self.output_rstrip:
+ line = line.rstrip()
+ line = line.decode(errors='ignore')
+ if self.output:
+ self.output(line)
+ self.logbuf.append((self, line))
+ except Exception as e:
+ if self.output:
+ self.output("{0}", str(e))
+ self.output(line)
+ self.logbuf.append((self, line))
+
+
+ def _run(cmd, cwd, env, *args, **kwargs):
  output = kwargs.get("output")
  output_on_error = kwargs.get("output_on_error")
  output_rstrip = kwargs.get("output_rstrip", True)
  output_stdio = kwargs.get("output_stdio", False)
+ output_stderr = kwargs.get("output_stderr", True)
+ output_stdout = kwargs.get("output_stdout", True)
+ return_stderr = kwargs.get("return_stderr", False)
  output = output if output is not None else True
  output = False if output_on_error else output
  shell = kwargs.get("shell", True)
- timeout = kwargs.get("timeout", None)
+ timeout = kwargs.get("timeout", config.getint("jolt", "command_timeout", 0))
+ timeout = timeout if type(timeout) is int and timeout > 0 else None

  log.debug("Running: '{0}' (CWD: {1})", cmd, cwd)

- p = subprocess.Popen(
- cmd,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- shell=shell,
- cwd=cwd,
- env=env,
- preexec_fn=preexec_fn,
- )
-
- class Reader(threading.Thread):
- def __init__(self, parent, stream, output=None, logbuf=None):
- super(Reader, self).__init__()
- self.output = output
- self.parent = parent
- self.stream = stream
- self.logbuf = logbuf if logbuf is not None else []
- self.start()
-
- def run(self):
- line = ""
- try:
- with log.map_thread(self, self.parent):
- for line in iter(self.stream.readline, b''):
- if output_rstrip:
- line = line.rstrip()
- line = line.decode(errors='ignore')
- if self.output:
- self.output(line)
- self.logbuf.append((self, line))
- except Exception as e:
- if self.output:
- self.output("{0}", str(e))
- self.output(line)
- self.logbuf.append((self, line))
-
- stdout_func = log.stdout if not output_stdio else stdout_write
- stderr_func = log.stderr if not output_stdio else stderr_write
-
- logbuf = []
- stdout = Reader(
- threading.current_thread(), p.stdout,
- output=stdout_func if output else None, logbuf=logbuf)
- stderr = Reader(
- threading.current_thread(), p.stderr,
- output=stderr_func if output else None, logbuf=logbuf)
+ p = None
+ stdout = None
+ stderr = None
+ timedout = False

  def terminate(pid):
  try:
@@ -124,23 +126,73 @@ def _run(cmd, cwd, env, preexec_fn, *args, **kwargs):
  except NoSuchProcess:
  pass

- timedout = False
  try:
+ with utils.delayed_interrupt():
+ p = subprocess.Popen(
+ cmd,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ shell=shell,
+ cwd=cwd,
+ env=env,
+ )
+
+ if output_stdout:
+ stdout_func = log.stdout if not output_stdio else stdout_write
+ else:
+ stdout_func = None
+
+ if output_stderr:
+ stderr_func = log.stderr if not output_stdio else stderr_write
+ else:
+ stderr_func = None
+
+ logbuf = []
+ stdout = Reader(
+ threading.current_thread(),
+ p.stdout,
+ output=stdout_func if output else None,
+ logbuf=logbuf,
+ output_rstrip=output_rstrip)
+ stderr = Reader(
+ threading.current_thread(),
+ p.stderr,
+ output=stderr_func if output else None,
+ logbuf=logbuf,
+ output_rstrip=output_rstrip)
+
  p.wait(timeout=timeout)
- except subprocess.TimeoutExpired:
+
+ except KeyboardInterrupt:
+ if not p:
+ raise
+ try:
+ terminate(p.pid)
+ p.wait(10)
+ except subprocess.TimeoutExpired:
+ kill(p.pid)
+ utils.call_and_catch(p.wait, 10)
+ raise
+
+ except (subprocess.TimeoutExpired, JoltTimeoutError):
  timedout = True
  try:
  terminate(p.pid)
  p.wait(10)
  except subprocess.TimeoutExpired:
  kill(p.pid)
- p.wait()
+ utils.call_and_catch(p.wait, 10)
+
  finally:
- stdout.join()
- stderr.join()
- p.stdin.close()
- p.stdout.close()
- p.stderr.close()
+ if stdout:
+ stdout.join()
+ if stderr:
+ stderr.join()
+ if p:
+ p.stdin.close()
+ p.stdout.close()
+ p.stderr.close()

  if p.returncode != 0 and output_on_error:
  for reader, line in logbuf:
@@ -159,11 +211,16 @@ def _run(cmd, cwd, env, preexec_fn, *args, **kwargs):

  if p.returncode != 0:
  stderrbuf = [line for reader, line in logbuf if reader is stderr]
- raise JoltCommandError(
- "Command {0}: {1}".format(
- "timeout" if timedout else "failed",
- " ".join(cmd) if type(cmd) is list else cmd.format(*args, **kwargs)),
- stdoutbuf, stderrbuf, p.returncode)
+ if timedout:
+ raise JoltTimeoutError(
+ "Command timeout: " + (" ".join(cmd) if type(cmd) is list else cmd))
+ else:
+ raise JoltCommandError(
+ "Command failed: " + (" ".join(cmd) if type(cmd) is list else cmd),
+ stdoutbuf, stderrbuf, p.returncode)
+ if return_stderr:
+ return "\n".join(stdoutbuf) if output_rstrip else "".join(stdoutbuf), \
+ "\n".join(stderrbuf) if output_rstrip else "".join(stderrbuf)
  return "\n".join(stdoutbuf) if output_rstrip else "".join(stdoutbuf)

@@ -197,36 +254,6 @@ class _String(object):
  return self._str.startswith(substr)


- class _tmpdir(object):
- def __init__(self, name, cwd=None):
- self._name = name
- self._path = None
- self._cwd = cwd or os.getcwd()
-
- def __enter__(self):
- try:
- dirname = self._cwd
- fs.makedirs(fs.path.join(dirname, fs.path.dirname(self._name)))
- self._path = fs.mkdtemp(prefix=self._name + "-", dir=dirname)
- except KeyboardInterrupt as e:
- raise e
- except Exception as e:
- raise e
- raise_error_if(not self._path, "failed to create temporary directory")
- return self
-
- def __exit__(self, type, value, tb):
- if self._path:
- fs.rmtree(self._path, ignore_errors=True)
-
- @property
- def path(self):
- return self.get_path()
-
- def get_path(self):
- return self._path
-
-
  class _CMake(object):
  def __init__(self, deps, tools, incremental=False):
  self.deps = deps
@@ -234,6 +261,10 @@ class _CMake(object):
  self.builddir = self.tools.builddir(incremental=incremental)
  self.installdir = self.tools.builddir("install", incremental=False)

+ def clean(self):
+ self.tools.rmtree(self.builddir, ignore_errors=True)
+ self.tools.rmtree(self.installdir, ignore_errors=True)
+
  def configure(self, sourcedir, *args, generator=None, **kwargs):
  sourcedir = self.tools.expand_path(sourcedir)

@@ -244,81 +275,85 @@ class _CMake(object):

  with self.tools.cwd(self.builddir):
  self.tools.run(
- "cmake {0} -B {1} -DCMAKE_INSTALL_PREFIX={2} {3} {4}",
+ "cmake {0} {1} -DCMAKE_INSTALL_PREFIX=/jolt-prefix {1} {2} {3}",
  sourcedir,
- self.builddir,
- self.installdir,
+ utils.option("-B", self.builddir),
  utils.option("-G", generator),
  extra_args,
  output=True)

- def build(self, release=True, *args, **kwargs):
- threading_args = ''
- try:
- threading_args = ' -j {}'.format(kwargs.get("threads", self.tools.thread_count())) \
- if "--parallel" in self.tools.run("cmake --help-manual cmake 2>&1", output=False) \
- else ''
- except Exception:
- pass
-
+ def build(self, *args, config="Release", **kwargs):
+ threading_args = ' -j {}'.format(kwargs.get("threads", self.tools.thread_count()))
  with self.tools.cwd(self.builddir):
- release = "--config Release" if release else ""
- self.tools.run("cmake --build . {0}{1}", release, threading_args, output=True)
+ self.tools.run("cmake --build . --config {0} {1}", config, threading_args, output=True)

- def install(self, release=True, *args, **kwargs):
- with self.tools.cwd(self.builddir):
- release = "--config Release" if release else ""
- self.tools.run("cmake --build . --target install {0}", release, output=True)
+ def install(self, target="install", config="Release", **kwargs):
+ with self.tools.cwd(self.builddir), self.tools.environ(DESTDIR=self.installdir):
+ self.tools.run("cmake --build . --config {0} --target {1}", config, target, output=True)

- def publish(self, artifact, files='*', *args, **kwargs):
- with self.tools.cwd(self.installdir):
- artifact.collect(files, *args, **kwargs)
+ def publish(self, artifact, files='*', symlinks=True, *args, **kwargs):
+ with self.tools.cwd(self.installdir, "jolt-prefix"):
+ artifact.collect(files, *args, symlinks=symlinks, **kwargs)
+ artifact.strings.install_prefix = "/jolt-prefix"


  class _Meson(object):
- def __init__(self, deps, tools):
+ def __init__(self, deps, tools, incremental=False):
  self.deps = deps
  self.tools = tools
- self.builddir = self.tools.builddir()
- self.installdir = self.tools.builddir("install")
+ self.builddir = self.tools.builddir(incremental=incremental)
+ self.installdir = self.tools.builddir("install", incremental=False)
+ self.prefix = "/jolt-prefix" if os.name != "nt" else "C:\\jolt-prefix"
+
+ def clean(self):
+ self.tools.rmtree(self.builddir, ignore_errors=True)
+ self.tools.rmtree(self.installdir, ignore_errors=True)

  def configure(self, sourcedir, *args, **kwargs):
  sourcedir = self.tools.expand_path(sourcedir)
- self.tools.run("meson --prefix=/ {0} {1}", sourcedir, self.builddir,
+ options = " ".join([f"-D{arg}" for arg in args]) + " "
+ options += " ".join(["-D{0}={1}".format(key, self.tools.expand(val)) for key, val in kwargs.items()])
+ self.tools.run("meson setup --prefix={0} {1} {2} {3}", self.prefix, sourcedir, self.builddir, options,
  output=True)

  def build(self, *args, **kwargs):
  self.tools.run("ninja -C {0} ", self.builddir, output=True)

  def install(self, *args, **kwargs):
- self.tools.run("DESTDIR={0} ninja -C {1} install",
- self.installdir, self.builddir,
- output=True)
+ with self.tools.environ(DESTDIR=self.installdir):
+ self.tools.run("ninja -C {0} install", self.builddir, output=True)

- def publish(self, artifact, files='*', *args, **kwargs):
- with self.tools.cwd(self.installdir):
- artifact.collect(files, *args, **kwargs)
+ def publish(self, artifact, files='*', symlinks=True, *args, **kwargs):
+ with self.tools.cwd(self.installdir, "jolt-prefix"):
+ artifact.collect(files, *args, symlinks=symlinks, **kwargs)
+ artifact.strings.install_prefix = self.prefix


  class _AutoTools(object):
- def __init__(self, deps, tools):
+ def __init__(self, deps, tools, incremental=False):
  self.deps = deps
  self.tools = tools
- self.builddir = self.tools.builddir()
- self.installdir = self.tools.builddir("install")
+ self.builddir = self.tools.builddir(incremental=incremental)
+ self.installdir = self.tools.builddir("install", incremental=False)
+ self.prefix = "jolt-prefix"

- def configure(self, sourcedir, *args, **kwargs):
+ def clean(self):
+ self.tools.rmtree(self.builddir, ignore_errors=True)
+ self.tools.rmtree(self.installdir, ignore_errors=True)
+
+ def configure(self, sourcedir, *args):
  sourcedir = self.tools.expand_path(sourcedir)
- prefix = kwargs.get("prefix", "/")

  if not fs.path.exists(fs.path.join(sourcedir, "configure")):
  with self.tools.cwd(sourcedir):
  self.tools.run("autoreconf -visf", output=True)

  with self.tools.cwd(self.builddir), self.tools.environ(DESTDIR=self.installdir):
- self.tools.run("{0}/configure --prefix={1} {2}",
- sourcedir, prefix,
- self.tools.getenv("CONFIGURE_FLAGS"),
+ self.tools.run("{0}/configure --prefix=/{1} {2} {3}",
+ sourcedir,
+ self.prefix,
+ self.tools.getenv("CONFIGURE_FLAGS", ""),
+ " ".join(args),
  output=True)

  def build(self, *args, **kwargs):
@@ -326,13 +361,14 @@ class _AutoTools(object):
  self.tools.run("make VERBOSE=yes Q= V=1 -j{0}",
  self.tools.cpu_count(), output=True)

- def install(self, target="install", **kwargs):
- with self.tools.cwd(self.builddir), self.tools.environ(DESTDIR=self.installdir):
- self.tools.run("make {}", target, output=True)
+ def install(self, target="install"):
+ with self.tools.cwd(self.builddir):
+ self.tools.run("make DESTDIR={} {}", self.installdir, target, output=True)

- def publish(self, artifact, files='*', *args, **kwargs):
- with self.tools.cwd(self.installdir):
- artifact.collect(files, *args, **kwargs)
+ def publish(self, artifact, files='*', symlinks=True, *args, **kwargs):
+ with self.tools.cwd(self.installdir, self.prefix):
+ artifact.collect(files, *args, symlinks=symlinks, **kwargs)
+ artifact.strings.install_prefix = "/" + self.prefix


  class ZipFile(zipfile.ZipFile):
@@ -377,11 +413,18 @@ class JinjaTaskContext(Context):
  Attempts to resolves any missing keywords by looking up task class attributes.
  """
  def resolve_or_missing(self, key):
+ if key in self.vars:
+ return self.vars[key]
+
+ if key in self.parent:
+ return self.parent[key]
+
  if key != "task":
  task = self.get("task")
  if task and hasattr(task, key):
  return getattr(task, key)
- return super(JinjaTaskContext, self).resolve_or_missing(key)
+
+ return missing


  class Namespace(object):
@@ -452,8 +495,10 @@ class Tools(object):

  def __init__(self, task=None, cwd=None, env=None):
  self._chroot = None
+ self._chroot_prefix = []
+ self._chroot_path = []
+ self._deadline = None
  self._run_prefix = []
- self._preexec_fn = None
  self._cwd = fs.path.normpath(fs.path.join(config.get_workdir(), cwd or config.get_workdir()))
  self._env = copy.deepcopy(env or os.environ)
  self._task = task
@@ -502,24 +547,50 @@ class Tools(object):
  zf.write(path, zippath)
  return filename

+ def _make_7zfile(self, filename, fmt, rootdir):
+ self.mkdirname(filename)
+ with py7zr.SevenZipFile(filename, 'w') as archive:
+ archive.writeall(rootdir, ".")
+ return filename
+
  def _make_tarfile(self, filename, fmt, rootdir):
- dirname = os.path.dirname(filename)
- if not os.path.exists(dirname):
- fs.makedirs(dirname)
+ self.mkdirname(filename)
  with tarfile.open(filename, 'w|%s' % fmt) as tar:
  tar.add(rootdir, ".")
  return filename

+ def _make_tarzstd(self, filename, rootdir):
+ self.mkdirname(filename)
+ with open(filename, 'wb') as zstd_file:
+ compressor = zstandard.ZstdCompressor(threads=self.thread_count())
+ with compressor.stream_writer(zstd_file) as stream:
+ with tarfile.open(mode="w|", fileobj=stream) as tar:
+ tar.add(rootdir, ".")
+ return filename
+
+ def _extract_tarzstd(self, filename, pathname, files=None):
+ with open(filename, 'rb') as zstd_file:
+ decompressor = zstandard.ZstdDecompressor()
+ with decompressor.stream_reader(zstd_file) as stream:
+ with tarfile.open(mode="r|", fileobj=stream) as tar:
+ if files:
+ for file in files:
+ tar.extract(file, pathname)
+ else:
+ tar.extractall(pathname)
+
  def archive(self, pathname, filename):
  """ Creates a (compressed) archive.

  The type of archive to create is determined by the filename extension.
  Supported formats are:

+ - 7z
  - tar
  - tar.bz2
  - tar.gz
  - tar.xz
+ - tar.zst
  - zip

  Args:
@@ -535,12 +606,14 @@ class Tools(object):
  elif filename.endswith(".tar"):
  fmt = "tar"
  elif filename.endswith(".tar.gz"):
- if shutil.which("tar") and shutil.which("pigz"):
+ if self.which("tar") and self.which("pigz"):
  self.run("tar -I pigz -cf {} -C {} .", filename, pathname)
  return filename
  fmt = "targz"
+ elif filename.endswith(".tar.zst"):
+ return self._make_tarzstd(filename, rootdir=pathname)
  elif filename.endswith(".tgz"):
- if shutil.which("tar") and shutil.which("pigz"):
+ if self.which("tar") and self.which("pigz"):
  self.run("tar -I pigz -cf {} -C {} .", filename, pathname)
  return filename
  fmt = "targz"
@@ -548,12 +621,16 @@ class Tools(object):
  fmt = "tarbz2"
  elif filename.endswith(".tar.xz"):
  fmt = "tarxz"
+ elif filename.endswith(".7z"):
+ fmt = "7z"
  raise_task_error_if(
  not fmt, self._task,
  "unknown archive type '{0}'", fs.path.basename(filename))
  try:
  if fmt == "zip":
  outfile = self._make_zipfile(filename, fmt, rootdir=pathname)
+ elif fmt == "7z":
+ outfile = self._make_7zfile(filename, fmt, rootdir=pathname)
  else:
  outfile = self._make_tarfile(filename, fmt[3:], rootdir=pathname)
  if outfile != filename:
@@ -562,9 +639,9 @@ class Tools(object):
  except Exception:
  raise_task_error(self._task, "failed to create archive from directory '{0}'", pathname)

- def autotools(self, deps=None):
+ def autotools(self, deps=None, incremental=False):
  """ Creates an AutoTools invokation helper """
- return _AutoTools(deps, self)
+ return _AutoTools(deps, self, incremental=incremental)

  @utils.locked(lock='_builddir_lock')
  def builddir(self, name=None, incremental=False, unique=True):
@@ -591,8 +668,16 @@ class Tools(object):
  name += "-" + utils.canonical(self._task.short_qualified_name)

  dirname = fs.path.join(self.getcwd(), name)
+ if incremental:
+ dirname += "-inc"
+
+ # Check if incremental build directories are disabled in the configuration
+ if incremental not in ["always"] and not config.is_incremental_build():
+ incremental = False

  if incremental:
+ # Create a unique build directory for each task
+ # and store the task name in a hidden file.
  if self._task is not None and unique:
  meta_task = fs.path.join(dirname, ".task")
  if not fs.path.exists(meta_task) \
@@ -600,6 +685,7 @@ class Tools(object):
  fs.rmtree(dirname, ignore_errors=True)
  fs.makedirs(dirname)

+ # Remove the build directory if the task taint has changed (--force or --salt)
  if self._task.taint is not None:
  meta = fs.path.join(dirname, ".taint")
  if not fs.path.exists(meta) or self.read_file(meta) != str(self._task.taint):
@@ -613,18 +699,16 @@ class Tools(object):
  return dirname

  if name not in self._builddir:
- fs.makedirs(fs.path.dirname(dirname))
- self._builddir[name] = fs.mkdtemp(
- prefix=fs.path.basename(dirname) + "-",
- dir=fs.path.dirname(dirname))
+ fs.makedirs(dirname)
+ self._builddir[name] = dirname

  return self._builddir[name]

  @property
  def buildroot(self):
  """ Return the root path of all build directories """
- from jolt.loader import get_workspacedir
- return fs.path.normpath(fs.path.join(get_workspacedir(), "build"))
+ from jolt.loader import JoltLoader
+ return fs.path.normpath(JoltLoader.get().build_path)

  def checksum_file(self, filelist, concat=False, hashfn=hashlib.sha1, filterfn=None):
  """ Calculate a checksum of one or multiple files.
@@ -681,6 +765,7 @@ class Tools(object):
  - .bz2
  - .gz
  - .xz
+ - .zst

  Args:
  src (str): Source file to be compressed.
@@ -697,7 +782,7 @@ class Tools(object):
  for block in iter(lambda: infp.read(0x10000), b''):
  outfp.write(block)
  elif ext == "gz":
- if shutil.which("pigz"):
+ if self.which("pigz"):
  return self.run("pigz -p {} {}", self.thread_count(), src)
  with open(src, 'rb') as infp:
  with gzip.open(dst, 'wb') as outfp:
@@ -708,6 +793,13 @@ class Tools(object):
  with lzma.open(dst, 'wb') as outfp:
  for block in iter(lambda: infp.read(0x10000), b''):
  outfp.write(block)
+ elif ext == "zst":
+ with open(src, 'rb') as infp:
+ with open(dst, 'wb') as outfp:
+ compressor = zstandard.ZstdCompressor(threads=self.thread_count())
+ with compressor.stream_writer(outfp) as stream:
+ for block in iter(lambda: infp.read(0x10000), b''):
+ stream.write(block)

  def copy(self, src, dst, symlinks=False):
  """ Copies file and directories (recursively).
@@ -776,10 +868,19 @@ class Tools(object):
  finally:
  self._cwd = prev

- def download(self, url, pathname, exceptions=True, **kwargs):
+ def download(self, url, pathname, exceptions=True, auth=None, **kwargs):
  """
  Downloads a file using HTTP.

+ Automatically expands any {keyword} arguments in the URL and pathname.
+
+ Basic authentication is supported by including the credentials in the URL.
+ Environment variables can be used to hide sensitive information. Specify
+ the environment variable name in the URI as e.g.
+ ``http://{environ[USER]}:{environ[PASS]}@host``.
+ Alternatively, the auth parameter can be used to provide an authentication
+ object that is passed to the requests.get() function.
+
  Throws a JoltError exception on failure.

  Args:
@@ -792,17 +893,32 @@ class Tools(object):

  url = self.expand(url)
  pathname = self.expand_path(pathname)
- try:
- from requests.api import get

- response = get(url, stream=True, **kwargs)
+ url_parsed = urlparse(url)
+ raise_task_error_if(
+ not url_parsed.scheme or not url_parsed.netloc,
+ self._task,
+ "Invalid URL: '{}'", url)
+
+ if auth is None and url_parsed.username and url_parsed.password:
+ auth = HTTPBasicAuth(url_parsed.username, url_parsed.password)
+
+ # Redact password from URL if present
+ if url_parsed.password:
+ url_parsed = url_parsed._replace(netloc=url_parsed.netloc.replace(url_parsed.password, "****"))
+
+ url_cleaned = urlunparse(url_parsed)
+
+ try:
+ response = http_session.get(url, stream=True, auth=auth, **kwargs)
  raise_error_if(
  exceptions and response.status_code not in [200],
- f"Download from '{url}' failed with status '{response.status_code}'")
+ f"Download from '{url_cleaned}' failed with status '{response.status_code}'")

  name = fs.path.basename(pathname)
  size = int(response.headers.get('content-length', 0))
- with log.progress("Downloading {0}".format(name), size, "B") as pbar:
+ with log.progress("Downloading {0}".format(utils.shorten(name)), size, "B") as pbar:
+ log.verbose("{} -> {}", url_cleaned, pathname)
  with open(pathname, 'wb') as out_file:
  chunk_size = 4096
  for data in response.iter_content(chunk_size=chunk_size):
@@ -810,7 +926,7 @@ class Tools(object):
  pbar.update(len(data))
  actual_size = self.file_size(pathname)
  raise_error_if(
- size != 0 and size != actual_size,
+ size != 0 and size > actual_size,
  f"Downloaded file was truncated to {actual_size}/{size} bytes: {name}")

  return response.status_code == 200
@@ -839,16 +955,29 @@ class Tools(object):
  with tools.environ(CC="clang"):
  tools.run("make all")
  """
- for key, value in kwargs.items():
- kwargs[key] = self.expand(value)
-
  restore = {key: value for key, value in self._env.items()}
- self._env.update(kwargs)
- yield self._env
+
  for key, value in kwargs.items():
- if key not in restore:
- del self._env[key]
- self._env.update(restore)
+ if value is not None:
+ self._env[key] = self.expand(value)
+ else:
+ self._env.pop(key, None)
+
+ try:
+ yield self._env
+ finally:
+ self._env = restore
+
+ def exists(self, pathname):
+ """ Checks if a file or directory exists.
+
+ Args:
+ pathname (str): Path to file or directory.
+
+ Returns:
+ bool: True if the file or directory exists, False otherwise.
+ """
+ return fs.path.exists(self.expand_path(pathname))

  def expand(self, string, *args, **kwargs):
  """ Expands keyword arguments/macros in a format string.
@@ -941,8 +1070,10 @@ class Tools(object):
  str: Expanded string.
  """

+ if not relpath:
+ relpath = self._task.joltdir if self._task else self.getcwd()
  pathname = self.expand(pathname, *args, **kwargs)
- relpath = self.expand(relpath or self._task.joltdir, *args, **kwargs)
+ relpath = self.expand(relpath, *args, **kwargs)
  pathname = fs.path.join(self.getcwd(), pathname)
  # Ensure to retain any trailing path separator which is used as
  # indicator of directory paths
@@ -954,10 +1085,12 @@ class Tools(object):

  Supported formats are:

+ - 7z
  - tar
  - tar.bz2
  - tar.gz
  - tar.xz
+ - tar.zst
  - zip

  Args:
@@ -984,7 +1117,7 @@ class Tools(object):
  else:
  tar.extractall(filepath)
  elif filename.endswith(".tar.gz") or filename.endswith(".tgz"):
- if shutil.which("tar") and shutil.which("pigz"):
+ if self.which("tar") and self.which("pigz"):
  self.run("tar -I pigz {} -xf {} -C {} {}",
  ignore_owner_tar, filename, filepath,
  " ".join(files) if files else "")
@@ -1011,6 +1144,18 @@ class Tools(object):
  tar.extract(file, filepath)
  else:
  tar.extractall(filepath)
+ elif filename.endswith(".tar.zst"):
+ try:
+ self._extract_tarzstd(filename, filepath, files)
+ except tarfile.StreamError as e:
+ raise_task_error(self._task, "failed to extract archive '{0}': {1}", filename, str(e))
+ elif filename.endswith(".7z"):
+ with py7zr.SevenZipFile(filename, 'r') as archive:
+ if files:
+ for file in files:
+ archive.extract(file, filepath)
+ else:
+ archive.extractall(filepath)
  else:
  raise_task_error(self._task, "unknown archive type '{0}'", fs.path.basename(filename))
  except Exception:
@@ -1040,7 +1185,7 @@ class Tools(object):
  """ Returns the current working directory. """
  return fs.path.normpath(self._cwd)

- def getenv(self, key, default=""):
+ def getenv(self, key, default=None):
  """ Returns the value of an environment variable.

  Only child processes spawned by the same tools object can see
@@ -1077,7 +1222,19 @@ class Tools(object):
  files = [self.expand_path(file) for file in files]
  elif not fs.path.isabs(pathname):
  files = [self.expand_relpath(file, self.getcwd()) for file in files]
- return files
+ return list(sorted(files))
+
+ def isdir(self, pathname):
+ """ Determines if a path is a directory.
+
+ Args:
+ pathname (str): Path to a file or directory.
+
+ Returns:
+ boolean: True if the path is a directory, False otherwise.
+ """
+ pathname = self.expand_path(pathname)
+ return fs.path.isdir(pathname)

  def mkdir(self, pathname, recursively=True):
  """ Create directory. """
@@ -1150,9 +1307,80 @@ class Tools(object):
  """
  return utils.map_concurrent(callable, iterable, max_workers)

- def meson(self, deps=None):
+ def meson(self, deps=None, incremental=False):
  """ Creates a Meson invokation helper """
- return _Meson(deps, self)
+ return _Meson(deps, self, incremental=incremental)
+
+ @contextmanager
+ def nixpkgs(self, nixfile=None, packages=None, pure=False, path=None, options=None):
+ """
+ Creates a Nix environment with the specified packages.
+
+ Args:
+ nixfile (str): Path to a Nix expression file.
+ packages (list): List of Nix packages to include in environment.
+ pure (boolean): Create a pure environment.
+ path (list): List of Nix expression paths.
+ options (dict): Nix configuration options.
+
+ Example:
+
+ .. code-block:: python
+
+ def run(self, deps, tools):
+ with tools.nixpkgs(packages=["gcc13"]):
+ tools.run("gcc --version")
+
+ """
+
+ # Check if Nix is available
+ raise_task_error_if(
+ not self.which("nix-shell"),
+ self._task,
+ "Nix not available on this system")
+
+ nixfile = self.expand_path(nixfile) if nixfile else ""
+ pathflags = " ".join([f"-I {path}" for path in path or []])
+ options = " ".join([f"--option {k} {v}" for k, v in (options or {}).items()])
+ pureflag = "--pure" if pure else ""
+ packages = "-p " + " ".join(packages) if packages else ""
+
+ # Expand all placeholders
+ options = self.expand(options)
+ packages = self.expand(packages)
+ pathflags = self.expand(pathflags)
+
+ # Use cached-nix-shell is available
+ nixshell = "cached-nix-shell" if self.which("cached-nix-shell") else "nix-shell"
+
+ # Run nix-shell to stage packages and environment
+ env = self.run(
+ "{} {} {} {} --run 'env -0' {}",
+ nixshell,
+ pathflags,
+ pureflag,
+ packages,
+ nixfile,
+ output_on_error=True)
+ env = env.strip().strip("\x00")
+ env = dict(line.split("=", 1) for line in env.split('\x00'))
+
+ # Add host path first to environment PATH
+ host_path = env.get("HOST_PATH", None)
+ if host_path:
+ env["PATH"] = host_path + os.pathsep + env["PATH"]
+
+ # Enter the environment
+ old_env = self._env
+ try:
+ if pure:
+ self._env = env
+ else:
+ self._env = copy.deepcopy(env)
+ self._env.update(env)
+ yield
+ finally:
+ self._env = old_env

  def render(self, template, **kwargs):
  """ Render a Jinja template string.
@@ -1169,10 +1397,12 @@ class Tools(object):
  try:
  env = Environment(
  loader=FileSystemLoader(self.getcwd()),
- autoescape=select_autoescape(),
+ autoescape=False,
  trim_blocks=True,
  lstrip_blocks=True)
  env.context_class = JinjaTaskContext
+ env.filters["prefix"] = utils.prefix
+ env.filters["suffix"] = utils.suffix
  tmpl = env.from_string(template)
  return tmpl.render(task=self._task, tools=self, **kwargs)
  except TemplateError as e:
@@ -1194,7 +1424,7 @@ class Tools(object):
  try:
  env = Environment(
  loader=FileSystemLoader(self.getcwd()),
- autoescape=select_autoescape(),
+ autoescape=False,
  trim_blocks=True,
  lstrip_blocks=True)
  env.context_class = JinjaTaskContext
@@ -1355,6 +1585,9 @@ class Tools(object):
  refuses to terminate, it will be killed after an additional
  10 seconds have passed. Default: None.

+ Returns:
+ str: stdout from command unless output=False
+
  Example:

  .. code-block:: python
@@ -1366,7 +1599,22 @@ class Tools(object):
  tools.run("make {target} VERBOSE={verbose} JOBS={0}", tools.cpu_count())

  """
+ kwargs.setdefault("shell", True)
+
+ # Append command prefix before expanding string
+ if self._chroot_prefix or self._run_prefix:
+ if type(cmd) is list:
+ cmd = self._chroot_prefix + self._run_prefix + cmd
+ else:
+ cmd = " ".join(self._chroot_prefix + self._run_prefix) + " " + cmd
+
+ if self._deadline is not None:
+ remaining = int(self._deadline - time.time() + 0.5)
+ timeout = kwargs.get("timeout", remaining)
+ kwargs["timeout"] = min(remaining, timeout)
+
  cmd = self.expand(cmd, *args, **kwargs)
+
  stdi, stdo, stde = None, None, None
  try:
  stdi, stdo, stde = None, None, None
@@ -1378,12 +1626,9 @@ class Tools(object):
  raise e
  except Exception:
  pass
- if self._run_prefix:
- if type(cmd) is list:
- cmd = self._run_prefix + cmd
- else:
- cmd = " ".join(self._run_prefix) + " " + cmd
- return _run(cmd, self._cwd, self._env, self._preexec_fn, *args, **kwargs)
+
+ return _run(cmd, self._cwd, self._env, *args, **kwargs)
+
  finally:
  if stdi:
  termios.tcsetattr(sys.stdin.fileno(), termios.TCSANOW, stdi)
@@ -1462,10 +1707,10 @@ class Tools(object):
  """

  raise_error_if(
- type(artifact) is not cache.Artifact,
+ type(artifact) not in [cache.Artifact, cache.ArtifactToolsProxy],
  "non-artifact passed as argument to Tools.sandbox()")

- suffix = utils.canonical(artifact.get_task().short_qualified_name)
+ suffix = utils.canonical(artifact.task.short_qualified_name)

  if reflect:
  sandbox_name = "sandboxes-reflected/" + suffix
@@ -1495,8 +1740,16 @@ class Tools(object):
  fs.rmtree(path)
  fs.makedirs(path)
  for relsrcpath, reldstpath in artifact.files.items():
- srcpath = fs.path.normpath(fs.path.join(artifact.get_task().joltdir, relsrcpath))
+ srcpath = fs.path.normpath(fs.path.join(artifact.task.joltdir, relsrcpath))
+ srcpath = self.expand_path(srcpath)
  dstpath = fs.path.normpath(fs.path.join(path, reldstpath))
+ dstpath = self.expand_path(dstpath)
+
+ if dstpath != fs.path.realpath(dstpath):
+ log.debug("Cannot symlink '{} -> {}', parent directory already symlinked",
+ srcpath, dstpath)
+ continue
+
  if fs.path.isdir(dstpath):
  files = fs.scandir(srcpath)
  for file in files:
@@ -1506,7 +1759,8 @@ class Tools(object):
  self.symlink(srcpath, dstpath)

  # Restore missing srcfiles if they resided in a build directory
- if srcpath.startswith(artifact.get_task().tools.buildroot) and \
+ buildroot_abs = self.expand_path(artifact.tools.buildroot)
+ if srcpath.startswith(buildroot_abs) and \
  not fs.path.exists(srcpath):
  fs.copy(fs.path.join(artifact.path, reldstpath), srcpath, symlinks=True)
  self.write_file(meta, artifact.path)
@@ -1553,7 +1807,41 @@ class Tools(object):
  fs.makedirs(dstdir)
  fs.symlink(src, dst)

- def tmpdir(self, name):
+ @contextmanager
+ def timeout(self, seconds):
+ """ Context manager to set a timeout for a block of code.
+
+ A TimeoutError exception is raised if the block of code does not
+ complete within the specified time.
+
+ Args:
+ seconds (int): Timeout in seconds.
+
+ Example:
+
+ .. code-block:: python
+
+ with tools.timeout(5):
+ tools.run("sleep 10")
+
+ """
+ if seconds is None:
+ yield
+ return
+
+ with utils.timeout(seconds, JoltTimeoutError):
+ old_deadline = self._deadline
+ try:
+ if old_deadline is None:
+ self._deadline = time.time() + seconds
+ else:
+ self._deadline = min(old_deadline, time.time() + seconds)
+ yield
+ finally:
+ self._deadline = old_deadline
+
+ @contextmanager
+ def tmpdir(self, name=None):
  """ Creates a temporary directory.

  The directory is only valid within a context and it is removed
@@ -1568,11 +1856,18 @@ class Tools(object):

  .. code-block:: python

- with tools.tmpdir("temp") as tmp, tools.cwd(tmp.path):
+ with tools.tmpdir() as tmp, tools.cwd(tmp):
  tools.write_file("tempfile", "tempdata")

  """
- return _tmpdir(name, cwd=self._cwd)
+ dirname = None
+ try:
+ self.mkdir(self.buildroot)
+ dirname = fs.mkdtemp(prefix=(name or "tmpdir") + "-", dir=self.buildroot)
+ yield fs.path.normpath(dirname)
+ finally:
+ if dirname:
+ self.rmtree(dirname, ignore_errors=True)

  def unlink(self, pathname, *args, **kwargs):
  """Removes a file from disk.
@@ -1587,7 +1882,8 @@ class Tools(object):
  return fs.unlink(pathname, ignore_errors=kwargs.get("ignore_errors", False))

  @contextmanager
- def chroot(self, chroot, *args, **kwargs):
+ @utils.deprecated
+ def chroot(self, chroot, *args, path=None, **kwargs):
  """
  Experimental: Use chroot as root filesystem when running commands.

@@ -1599,6 +1895,9 @@ class Tools(object):
  Args:
  chroot (str, artifact): Path to rootfs directory, or an artifact
  with a 'rootfs' metadata path (artifact.paths.rootfs).
+ path (list): List of directory paths within the chroot to add to
+ the PATH environment variable, e.g. ["/usr/bin", "/bin"].
+ By default, the current PATH is used also within the chroot.

  Example:

@@ -1610,9 +1909,17 @@ class Tools(object):
  """
  raise_error_if(platform.system() != "Linux", "Tools.chroot() is only supported on Linux")

- if type(chroot) is cache.Artifact:
+ raise_task_error_if(
+ not self.which("newuidmap") and not self.which("/usr/bin/newuidmap"), self._task,
+ "No usable 'newuidmap' found in PATH")
+
+ raise_task_error_if(
+ not self.which("newgidmap") and not self.which("/usr/bin/newuidmap"), self._task,
+ "No usable 'newgidmap' found in PATH")
+
+ if type(chroot) in [cache.Artifact, cache.ArtifactToolsProxy]:
  raise_task_error_if(
- not str(chroot.paths.rootfs), self._task,
+ not chroot.paths.rootfs, self._task,
  "No 'rootfs' path in artifact")
  chroot = chroot.paths.rootfs

@@ -1621,6 +1928,8 @@ class Tools(object):
  not fs.path.exists(chroot) or not fs.path.isdir(chroot),
  self._task, "failed to change root to '{0}'", chroot)

+ bind = []
+
  mount_dev = kwargs.get("mount_dev", True)
  mount_etc = kwargs.get("mount_etc", True)
  mount_home = kwargs.get("mount_home", False)
@@ -1634,93 +1943,68 @@ class Tools(object):
  self._task, "Expected a list as mount argument to Tools.chroot()")
  mount = [self.expand(m) for m in mount]

- overlaydir = self.builddir("overlay")
- overlayrootdir = fs.path.join(overlaydir, "root")
- with self.cwd(overlaydir):
- self.mkdir("root")
- self.mkdir("work")
- self.mkdir("uppr")
- overlayopts = f"upperdir={overlaydir}/uppr,workdir={overlaydir}/work,lowerdir={chroot}"
-
- def unshare_chroot(overlayrootdir):
- uid = os.geteuid()
- gid = os.geteuid()
- self._unshare([(uid, uid, 1)], [(gid, gid, 1)])
-
- from ctypes import CDLL, c_char_p
- libc = CDLL("libc.so.6")
-
- MS_BIND = 4096
- MS_REC = 16384
-
- def mount_overlay():
- return libc.mount(
- c_char_p("overlay".encode("utf-8")),
- c_char_p(overlayrootdir.encode("utf-8")),
- c_char_p("overlay".encode("utf-8")),
- 0,
- c_char_p(overlayopts.encode("utf-8"))) == 0
-
- # If the overlay mount fails, just don't use one.
- if not mount_overlay():
- overlayrootdir = chroot
-
- def mount_bind(path):
- if os.path.isdir(path):
- os.makedirs(overlayrootdir + path, exist_ok=True)
- else:
- os.makedirs(os.path.dirname(overlayrootdir + path), exist_ok=True)
- if not os.path.exists(overlayrootdir + path):
- with open(overlayrootdir + path, "a"):
- pass
- assert libc.mount(
- c_char_p(path.encode("utf-8")),
- c_char_p((overlayrootdir + path).encode("utf-8")),
- None,
- MS_BIND | MS_REC,
- None) == 0
-
- if mount_etc:
- mount_bind("/etc/group")
- mount_bind("/etc/hostname")
- mount_bind("/etc/hosts")
- mount_bind("/etc/passwd")
- mount_bind("/etc/resolv.conf")
- if mount_home:
- mount_bind("/home")
- if mount_joltdir and self._task:
- from jolt.loader import get_workspacedir
- mount_bind(get_workspacedir())
- if mount_cachedir:
- mount_bind(config.get_cachedir())
- if mount_builddir:
- mount_bind(self.buildroot)
- if mount:
- for m in mount:
- mount_bind(m)
- if mount_dev:
- mount_bind("/dev")
- if mount_proc:
- mount_bind("/proc")
- os.chroot(overlayrootdir)
- os.chdir(self.getcwd())
-
- def unshare_chroot_catch():
- try:
- unshare_chroot(overlayrootdir)
- except Exception as e:
- log.exception(e)
- raise e
+ if mount_etc:
+ bind.append("/etc/group")
+ bind.append("/etc/hostname")
+ bind.append("/etc/hosts")
+ bind.append("/etc/passwd")
+ bind.append("/etc/resolv.conf")
+
+ if mount_home:
+ bind.append("/home")
+
+ if mount_joltdir and self._task:
+ from jolt.loader import get_workspacedir
+ bind.append(get_workspacedir())
+
+ if mount_cachedir:
+ bind.append(config.get_cachedir())
+
+ if mount_builddir:
+ bind.append(self.buildroot)
+
+ if mount:
+ for m in mount:
+ bind.append(m)
+
+ if mount_dev:
+ bind.append("/dev")
+
+ if mount_proc:
+ bind.append("/proc")
+
+ unshare = os.path.join(os.path.dirname(__file__), "chroot.py")

  old_chroot = self._chroot
- old_preexec_fn = self._preexec_fn
+ old_chroot_path = self._chroot_path
+ old_chroot_prefix = self._chroot_prefix
  self._chroot = chroot
- self._preexec_fn = unshare_chroot_catch
+
+ if path:
+ self._chroot_path = path
+ else:
+ self._chroot_path = self._env.get("PATH")
+ self._chroot_path = self._chroot_path.split(fs.pathsep) if self._chroot_path else []
+
  try:
- yield self._chroot
+ with self.tmpdir("chroot") as bindroot:
+ self._chroot_prefix = [
+ sys.executable,
+ unshare,
+ "-b",
+ ] + bind + [
+ "-c",
+ chroot,
+ "-t",
+ bindroot,
+ "--shell={shell}",
+ "--",
+ ]
+ yield
  finally:
  self._chroot = old_chroot
- self._preexec_fn = old_preexec_fn
+ self._chroot_path = old_chroot_path
+ self._chroot_prefix = old_chroot_prefix

  def _unshare(self, uidmap, gidmap):
  from ctypes import CDLL
@@ -1735,6 +2019,7 @@ class Tools(object):
  raise_task_error_if(
  not newuidmap, self._task,
  "No usable 'newuidmap' found in PATH")
+
  newgidmap = self.which("newgidmap")
  raise_task_error_if(
  not newgidmap, self._task,
@@ -1809,6 +2094,12 @@ class Tools(object):
  raise_task_error_if(
  not uidmap, self._task,
  "Invalid uid map: {}", uidmap)
+ raise_task_error_if(
+ not self.which("newuidmap"), self._task,
+ "No usable 'newuidmap' found in PATH")
+ raise_task_error_if(
+ not self.which("newgidmap"), self._task,
+ "No usable 'newgidmap' found in PATH")

  msgq = multiprocessing.JoinableQueue()
  pid = os.fork()
@@ -1840,6 +2131,15 @@ class Tools(object):
  """
  Uploads a file using HTTP (PUT).

+ Automatically expands any {keyword} arguments in the URL and pathname.
+
+ Basic authentication is supported by including the credentials in the URL.
+ Environment variables can be used to hide sensitive information. Specify
+ the environment variable name in the URI as e.g.
+ ``http://{environ[USER]}:{environ[PASS]}@host``.
+ Alternatively, the auth parameter can be used to provide an authentication
+ object that is passed to the requests.get() function.
+
  Throws a JoltError exception on failure.

  Args:
@@ -1852,18 +2152,38 @@ class Tools(object):

  """
  pathname = self.expand_path(pathname)
+ url = self.expand(url)
  name = fs.path.basename(pathname)
  size = self.file_size(pathname)
- with log.progress("Uploading " + name, size, "B") as pbar, open(pathname, 'rb') as fileobj:
+
+ url_parsed = urlparse(url)
+ raise_task_error_if(
+ not url_parsed.scheme or not url_parsed.netloc,
+ self._task,
+ "Invalid URL: '{}'", url)
+
+ if auth is None and url_parsed.username and url_parsed.password:
+ auth = HTTPBasicAuth(url_parsed.username, url_parsed.password)
+
+ # Redact password from URL if present
+ if url_parsed.password:
+ url_parsed = url_parsed._replace(netloc=url_parsed.netloc.replace(url_parsed.password, "****"))
+
+ url_cleaned = urlunparse(url_parsed)
+
+ with log.progress("Uploading " + utils.shorten(name), size, "B") as pbar, \
+ open(pathname, 'rb') as fileobj:
+ log.verbose("{} -> {}", pathname, url_cleaned)
+
  def read():
  data = fileobj.read(4096)
  pbar.update(len(data))
  return data
- from requests.api import put
- response = put(url, data=iter(read, b''), auth=auth, **kwargs)
+
+ response = http_session.put(url, data=iter(read, b''), auth=auth, **kwargs)
  raise_error_if(
  exceptions and response.status_code not in [201, 204],
- f"Upload to '{url}' failed with status '{response.status_code}'")
+ f"Upload to '{url_cleaned}' failed with status '{response.status_code}'")
  return response.status_code in [201, 204]

  def read_file(self, pathname, binary=False):
@@ -1912,6 +2232,20 @@ class Tools(object):

  return deps

+ def read_json(self, pathname):
+ """
+ Reads a JSON file.
+
+ Args:
+ pathname (str): Name/path of file to be read.
+
+ Returns:
+ dict: Dictionary of JSON data.
+ """
+ pathname = self.expand_path(pathname)
+ with open(pathname) as f:
+ return json.load(f)
+
  def which(self, executable):
  """ Find executable in PATH.

@@ -1923,9 +2257,12 @@ class Tools(object):
  """
  executable = self.expand(executable)
  path = self._env.get("PATH")
+
  if self._chroot:
- path = fs.pathsep.join(
- [self._chroot + p for p in path.split(fs.pathsep)]) + fs.pathsep + path
+ path = path.split(fs.pathsep) if path else []
+ path = [os.path.join(self._chroot, p.lstrip(fs.sep)) for p in self._chroot_path] + path
+ path = fs.pathsep.join(path)
+
  result = shutil.which(executable, path=path)
  if result and self._chroot and result.startswith(self._chroot):
  result = result[len(self._chroot):]
@@ -1952,3 +2289,22 @@ class Tools(object):
  content = self.expand(content, **kwargs)
  with open(pathname, "wb") as f:
  f.write(content.encode())
+
+ def write_json(self, pathname, data, indent=4, sort_keys=True):
+ """
+ Writes a JSON file.
+
+ Args:
+ pathname (str): Name/path of file to be written.
+ data (dict): Dictionary of JSON data.
+ indent (int): Indentation level for JSON output.
+ """
+ pathname = self.expand_path(pathname)
+ with open(pathname, "w") as f:
+ json.dump(data, f, indent=indent, sort_keys=sort_keys)
+
+ @property
+ def wsroot(self):
+ """ Return the root path of all build directories """
+ from jolt.loader import get_workspacedir
+ return fs.path.normpath(get_workspacedir())
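For reference, the updated download()/upload() docstrings above describe basic authentication with credentials embedded in the URL, typically supplied via environment variables, with the password redacted ('****') from logged URLs. The following is a minimal, hypothetical sketch of that usage; the host, paths and variable names are placeholders and the pathname-then-url argument order for upload() is inferred from the method body shown in the diff:

    from jolt import Task


    class FetchDemo(Task):
        """Hypothetical task using the new download()/upload() URL authentication."""

        def run(self, deps, tools):
            # {environ[...]} placeholders are expanded from the environment when
            # the URL is formatted; the password never appears in the log output.
            ok = tools.download(
                "https://{environ[HTTP_USER]}:{environ[HTTP_PASS]}@example.com/files/src.tar.gz",
                "src.tar.gz")
            assert ok

            # upload() uses HTTP PUT; status 201/204 is treated as success.
            tools.upload(
                "src.tar.gz",
                "https://{environ[HTTP_USER]}:{environ[HTTP_PASS]}@example.com/files/src.tar.gz")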