jolt 0.9.76__py3-none-any.whl → 0.9.429__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (201)
  1. jolt/__init__.py +88 -7
  2. jolt/__main__.py +9 -1
  3. jolt/bin/fstree-darwin-x86_64 +0 -0
  4. jolt/bin/fstree-linux-x86_64 +0 -0
  5. jolt/cache.py +839 -367
  6. jolt/chroot.py +156 -0
  7. jolt/cli.py +362 -143
  8. jolt/common_pb2.py +63 -0
  9. jolt/common_pb2_grpc.py +4 -0
  10. jolt/config.py +99 -42
  11. jolt/error.py +19 -4
  12. jolt/expires.py +2 -2
  13. jolt/filesystem.py +8 -6
  14. jolt/graph.py +705 -117
  15. jolt/hooks.py +63 -1
  16. jolt/influence.py +129 -6
  17. jolt/loader.py +369 -121
  18. jolt/log.py +225 -63
  19. jolt/manifest.py +28 -38
  20. jolt/options.py +35 -10
  21. jolt/pkgs/abseil.py +42 -0
  22. jolt/pkgs/asio.py +25 -0
  23. jolt/pkgs/autoconf.py +41 -0
  24. jolt/pkgs/automake.py +41 -0
  25. jolt/pkgs/b2.py +31 -0
  26. jolt/pkgs/boost.py +111 -0
  27. jolt/pkgs/boringssl.py +32 -0
  28. jolt/pkgs/busybox.py +39 -0
  29. jolt/pkgs/bzip2.py +43 -0
  30. jolt/pkgs/cares.py +29 -0
  31. jolt/pkgs/catch2.py +36 -0
  32. jolt/pkgs/cbindgen.py +17 -0
  33. jolt/pkgs/cista.py +19 -0
  34. jolt/pkgs/clang.py +44 -0
  35. jolt/pkgs/cli11.py +23 -0
  36. jolt/pkgs/cmake.py +48 -0
  37. jolt/pkgs/cpython.py +196 -0
  38. jolt/pkgs/crun.py +29 -0
  39. jolt/pkgs/curl.py +38 -0
  40. jolt/pkgs/dbus.py +18 -0
  41. jolt/pkgs/double_conversion.py +24 -0
  42. jolt/pkgs/fastfloat.py +21 -0
  43. jolt/pkgs/ffmpeg.py +28 -0
  44. jolt/pkgs/flatbuffers.py +29 -0
  45. jolt/pkgs/fmt.py +27 -0
  46. jolt/pkgs/fstree.py +20 -0
  47. jolt/pkgs/gflags.py +18 -0
  48. jolt/pkgs/glib.py +18 -0
  49. jolt/pkgs/glog.py +25 -0
  50. jolt/pkgs/glslang.py +21 -0
  51. jolt/pkgs/golang.py +16 -11
  52. jolt/pkgs/googlebenchmark.py +18 -0
  53. jolt/pkgs/googletest.py +46 -0
  54. jolt/pkgs/gperf.py +15 -0
  55. jolt/pkgs/grpc.py +73 -0
  56. jolt/pkgs/hdf5.py +19 -0
  57. jolt/pkgs/help2man.py +14 -0
  58. jolt/pkgs/inja.py +28 -0
  59. jolt/pkgs/jsoncpp.py +31 -0
  60. jolt/pkgs/libarchive.py +43 -0
  61. jolt/pkgs/libcap.py +44 -0
  62. jolt/pkgs/libdrm.py +44 -0
  63. jolt/pkgs/libedit.py +42 -0
  64. jolt/pkgs/libevent.py +31 -0
  65. jolt/pkgs/libexpat.py +27 -0
  66. jolt/pkgs/libfastjson.py +21 -0
  67. jolt/pkgs/libffi.py +16 -0
  68. jolt/pkgs/libglvnd.py +30 -0
  69. jolt/pkgs/libogg.py +28 -0
  70. jolt/pkgs/libpciaccess.py +18 -0
  71. jolt/pkgs/libseccomp.py +21 -0
  72. jolt/pkgs/libtirpc.py +24 -0
  73. jolt/pkgs/libtool.py +42 -0
  74. jolt/pkgs/libunwind.py +35 -0
  75. jolt/pkgs/libva.py +18 -0
  76. jolt/pkgs/libvorbis.py +33 -0
  77. jolt/pkgs/libxml2.py +35 -0
  78. jolt/pkgs/libxslt.py +17 -0
  79. jolt/pkgs/libyajl.py +16 -0
  80. jolt/pkgs/llvm.py +81 -0
  81. jolt/pkgs/lua.py +54 -0
  82. jolt/pkgs/lz4.py +26 -0
  83. jolt/pkgs/m4.py +14 -0
  84. jolt/pkgs/make.py +17 -0
  85. jolt/pkgs/mesa.py +81 -0
  86. jolt/pkgs/meson.py +17 -0
  87. jolt/pkgs/mstch.py +28 -0
  88. jolt/pkgs/mysql.py +60 -0
  89. jolt/pkgs/nasm.py +49 -0
  90. jolt/pkgs/ncurses.py +30 -0
  91. jolt/pkgs/ng_log.py +25 -0
  92. jolt/pkgs/ninja.py +45 -0
  93. jolt/pkgs/nlohmann_json.py +25 -0
  94. jolt/pkgs/nodejs.py +19 -11
  95. jolt/pkgs/opencv.py +24 -0
  96. jolt/pkgs/openjdk.py +26 -0
  97. jolt/pkgs/openssl.py +103 -0
  98. jolt/pkgs/paho.py +76 -0
  99. jolt/pkgs/patchelf.py +16 -0
  100. jolt/pkgs/perl.py +42 -0
  101. jolt/pkgs/pkgconfig.py +64 -0
  102. jolt/pkgs/poco.py +39 -0
  103. jolt/pkgs/protobuf.py +77 -0
  104. jolt/pkgs/pugixml.py +27 -0
  105. jolt/pkgs/python.py +19 -0
  106. jolt/pkgs/qt.py +35 -0
  107. jolt/pkgs/rapidjson.py +26 -0
  108. jolt/pkgs/rapidyaml.py +28 -0
  109. jolt/pkgs/re2.py +30 -0
  110. jolt/pkgs/re2c.py +17 -0
  111. jolt/pkgs/readline.py +15 -0
  112. jolt/pkgs/rust.py +41 -0
  113. jolt/pkgs/sdl.py +28 -0
  114. jolt/pkgs/simdjson.py +27 -0
  115. jolt/pkgs/soci.py +46 -0
  116. jolt/pkgs/spdlog.py +29 -0
  117. jolt/pkgs/spirv_llvm.py +21 -0
  118. jolt/pkgs/spirv_tools.py +24 -0
  119. jolt/pkgs/sqlite.py +83 -0
  120. jolt/pkgs/ssl.py +12 -0
  121. jolt/pkgs/texinfo.py +15 -0
  122. jolt/pkgs/tomlplusplus.py +22 -0
  123. jolt/pkgs/wayland.py +26 -0
  124. jolt/pkgs/x11.py +58 -0
  125. jolt/pkgs/xerces_c.py +20 -0
  126. jolt/pkgs/xorg.py +360 -0
  127. jolt/pkgs/xz.py +29 -0
  128. jolt/pkgs/yamlcpp.py +30 -0
  129. jolt/pkgs/zeromq.py +47 -0
  130. jolt/pkgs/zlib.py +69 -0
  131. jolt/pkgs/zstd.py +33 -0
  132. jolt/plugins/alias.py +3 -0
  133. jolt/plugins/allure.py +5 -2
  134. jolt/plugins/autotools.py +66 -0
  135. jolt/plugins/cache.py +133 -0
  136. jolt/plugins/cmake.py +74 -6
  137. jolt/plugins/conan.py +238 -0
  138. jolt/plugins/cxx.py +698 -0
  139. jolt/plugins/cxxinfo.py +7 -0
  140. jolt/plugins/dashboard.py +1 -1
  141. jolt/plugins/docker.py +91 -23
  142. jolt/plugins/email.py +5 -2
  143. jolt/plugins/email.xslt +144 -101
  144. jolt/plugins/environ.py +11 -0
  145. jolt/plugins/fetch.py +141 -0
  146. jolt/plugins/gdb.py +44 -21
  147. jolt/plugins/gerrit.py +1 -14
  148. jolt/plugins/git.py +316 -101
  149. jolt/plugins/googletest.py +522 -1
  150. jolt/plugins/http.py +36 -38
  151. jolt/plugins/libtool.py +63 -0
  152. jolt/plugins/linux.py +990 -0
  153. jolt/plugins/logstash.py +4 -4
  154. jolt/plugins/meson.py +61 -0
  155. jolt/plugins/ninja-compdb.py +107 -31
  156. jolt/plugins/ninja.py +929 -134
  157. jolt/plugins/paths.py +11 -1
  158. jolt/plugins/pkgconfig.py +219 -0
  159. jolt/plugins/podman.py +148 -91
  160. jolt/plugins/python.py +137 -0
  161. jolt/plugins/remote_execution/__init__.py +0 -0
  162. jolt/plugins/remote_execution/administration_pb2.py +46 -0
  163. jolt/plugins/remote_execution/administration_pb2_grpc.py +170 -0
  164. jolt/plugins/remote_execution/log_pb2.py +32 -0
  165. jolt/plugins/remote_execution/log_pb2_grpc.py +68 -0
  166. jolt/plugins/remote_execution/scheduler_pb2.py +41 -0
  167. jolt/plugins/remote_execution/scheduler_pb2_grpc.py +141 -0
  168. jolt/plugins/remote_execution/worker_pb2.py +38 -0
  169. jolt/plugins/remote_execution/worker_pb2_grpc.py +112 -0
  170. jolt/plugins/report.py +12 -2
  171. jolt/plugins/rust.py +25 -0
  172. jolt/plugins/scheduler.py +710 -0
  173. jolt/plugins/selfdeploy/setup.py +9 -4
  174. jolt/plugins/selfdeploy.py +138 -88
  175. jolt/plugins/strings.py +35 -22
  176. jolt/plugins/symlinks.py +26 -11
  177. jolt/plugins/telemetry.py +5 -2
  178. jolt/plugins/timeline.py +13 -3
  179. jolt/plugins/volume.py +46 -48
  180. jolt/scheduler.py +591 -191
  181. jolt/tasks.py +1783 -245
  182. jolt/templates/export.sh.template +12 -6
  183. jolt/templates/timeline.html.template +44 -47
  184. jolt/timer.py +22 -0
  185. jolt/tools.py +749 -302
  186. jolt/utils.py +245 -18
  187. jolt/version.py +1 -1
  188. jolt/version_utils.py +2 -2
  189. jolt/xmldom.py +12 -2
  190. {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/METADATA +98 -38
  191. jolt-0.9.429.dist-info/RECORD +207 -0
  192. {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/WHEEL +1 -1
  193. jolt/plugins/amqp.py +0 -834
  194. jolt/plugins/debian.py +0 -338
  195. jolt/plugins/ftp.py +0 -181
  196. jolt/plugins/ninja-cache.py +0 -64
  197. jolt/plugins/ninjacli.py +0 -271
  198. jolt/plugins/repo.py +0 -253
  199. jolt-0.9.76.dist-info/RECORD +0 -79
  200. {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/entry_points.txt +0 -0
  201. {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/top_level.txt +0 -0
jolt/tools.py CHANGED
@@ -1,27 +1,36 @@
1
+ import py7zr
1
2
  import bz2
2
3
  import copy
3
4
  import getpass
4
5
  import gzip
6
+ import json
5
7
  import lzma
6
8
  import subprocess
7
9
  import os
8
10
  import platform
9
11
  import sys
10
12
  import threading
13
+ import time
11
14
  if os.name != "nt":
12
15
  import termios
13
16
  import glob
14
17
  import multiprocessing
18
+ import re
15
19
  import shutil
16
20
  import tarfile
17
21
  import zipfile
18
22
  import bz2file
19
23
  import hashlib
24
+ import zstandard
20
25
  from contextlib import contextmanager
21
26
  from psutil import NoSuchProcess, Process
22
-
23
- from jinja2 import Environment, FileSystemLoader, select_autoescape
27
+ from jinja2 import Environment, FileSystemLoader
28
+ from jinja2.exceptions import TemplateError
24
29
  from jinja2.runtime import Context
30
+ from jinja2.utils import missing
31
+ from requests import Session
32
+ from requests.auth import HTTPBasicAuth
33
+ from urllib.parse import urlparse, urlunparse
25
34
 
26
35
 
27
36
  from jolt import cache
@@ -29,11 +38,17 @@ from jolt import filesystem as fs
29
38
  from jolt import log
30
39
  from jolt import utils
31
40
  from jolt import config
32
- from jolt.error import JoltCommandError
41
+ from jolt.error import JoltCommandError, JoltTimeoutError
33
42
  from jolt.error import raise_error_if
34
43
  from jolt.error import raise_task_error, raise_task_error_if
35
44
 
36
45
 
46
+ SUPPORTED_ARCHIVE_TYPES = [".tar", ".tar.bz2", ".tar.gz", ".tgz", ".tar.xz", ".tar.zst", ".zip"]
47
+
48
+
49
+ http_session = Session()
50
+
51
+
37
52
  def stdout_write(line):
38
53
  sys.stdout.write(line + "\n")
39
54
  sys.stdout.flush()
@@ -44,65 +59,54 @@ def stderr_write(line):
44
59
  sys.stderr.flush()
45
60
 
46
61
 
47
- def _run(cmd, cwd, env, preexec_fn, *args, **kwargs):
62
+ class Reader(threading.Thread):
63
+ def __init__(self, parent, stream, output=None, logbuf=None, output_rstrip=True):
64
+ super(Reader, self).__init__()
65
+ self.output = output
66
+ self.output_rstrip = output_rstrip
67
+ self.parent = parent
68
+ self.stream = stream
69
+ self.logbuf = logbuf if logbuf is not None else []
70
+ self.start()
71
+
72
+ def run(self):
73
+ line = ""
74
+ try:
75
+ with log.map_thread(self, self.parent):
76
+ for line in iter(self.stream.readline, b''):
77
+ if self.output_rstrip:
78
+ line = line.rstrip()
79
+ line = line.decode(errors='ignore')
80
+ if self.output:
81
+ self.output(line)
82
+ self.logbuf.append((self, line))
83
+ except Exception as e:
84
+ if self.output:
85
+ self.output("{0}", str(e))
86
+ self.output(line)
87
+ self.logbuf.append((self, line))
88
+
89
+
90
+ def _run(cmd, cwd, env, *args, **kwargs):
48
91
  output = kwargs.get("output")
49
92
  output_on_error = kwargs.get("output_on_error")
50
93
  output_rstrip = kwargs.get("output_rstrip", True)
51
94
  output_stdio = kwargs.get("output_stdio", False)
95
+ output_stderr = kwargs.get("output_stderr", True)
96
+ output_stdout = kwargs.get("output_stdout", True)
97
+ return_stderr = kwargs.get("return_stderr", False)
52
98
  output = output if output is not None else True
53
99
  output = False if output_on_error else output
54
100
  shell = kwargs.get("shell", True)
55
- timeout = kwargs.get("timeout", None)
101
+ timeout = kwargs.get("timeout", config.getint("jolt", "command_timeout", 0))
102
+ timeout = timeout if type(timeout) is int and timeout > 0 else None
56
103
 
57
104
  log.debug("Running: '{0}' (CWD: {1})", cmd, cwd)
58
105
 
59
- p = subprocess.Popen(
60
- cmd,
61
- stdin=subprocess.PIPE,
62
- stdout=subprocess.PIPE,
63
- stderr=subprocess.PIPE,
64
- shell=shell,
65
- cwd=cwd,
66
- env=env,
67
- preexec_fn=preexec_fn,
68
- )
69
-
70
- class Reader(threading.Thread):
71
- def __init__(self, parent, stream, output=None, logbuf=None):
72
- super(Reader, self).__init__()
73
- self.output = output
74
- self.parent = parent
75
- self.stream = stream
76
- self.logbuf = logbuf if logbuf is not None else []
77
- self.start()
78
-
79
- def run(self):
80
- line = ""
81
- try:
82
- with log.map_thread(self, self.parent):
83
- for line in iter(self.stream.readline, b''):
84
- if output_rstrip:
85
- line = line.rstrip()
86
- line = line.decode(errors='ignore')
87
- if self.output:
88
- self.output(line)
89
- self.logbuf.append((self, line))
90
- except Exception as e:
91
- if self.output:
92
- self.output("{0}", str(e))
93
- self.output(line)
94
- self.logbuf.append((self, line))
95
-
96
- stdout_func = log.stdout if not output_stdio else stdout_write
97
- stderr_func = log.stderr if not output_stdio else stderr_write
98
-
99
- logbuf = []
100
- stdout = Reader(
101
- threading.current_thread(), p.stdout,
102
- output=stdout_func if output else None, logbuf=logbuf)
103
- stderr = Reader(
104
- threading.current_thread(), p.stderr,
105
- output=stderr_func if output else None, logbuf=logbuf)
106
+ p = None
107
+ stdout = None
108
+ stderr = None
109
+ timedout = False
106
110
 
107
111
  def terminate(pid):
108
112
  try:
@@ -122,23 +126,73 @@ def _run(cmd, cwd, env, preexec_fn, *args, **kwargs):
122
126
  except NoSuchProcess:
123
127
  pass
124
128
 
125
- timedout = False
126
129
  try:
130
+ with utils.delayed_interrupt():
131
+ p = subprocess.Popen(
132
+ cmd,
133
+ stdin=subprocess.PIPE,
134
+ stdout=subprocess.PIPE,
135
+ stderr=subprocess.PIPE,
136
+ shell=shell,
137
+ cwd=cwd,
138
+ env=env,
139
+ )
140
+
141
+ if output_stdout:
142
+ stdout_func = log.stdout if not output_stdio else stdout_write
143
+ else:
144
+ stdout_func = None
145
+
146
+ if output_stderr:
147
+ stderr_func = log.stderr if not output_stdio else stderr_write
148
+ else:
149
+ stderr_func = None
150
+
151
+ logbuf = []
152
+ stdout = Reader(
153
+ threading.current_thread(),
154
+ p.stdout,
155
+ output=stdout_func if output else None,
156
+ logbuf=logbuf,
157
+ output_rstrip=output_rstrip)
158
+ stderr = Reader(
159
+ threading.current_thread(),
160
+ p.stderr,
161
+ output=stderr_func if output else None,
162
+ logbuf=logbuf,
163
+ output_rstrip=output_rstrip)
164
+
127
165
  p.wait(timeout=timeout)
128
- except subprocess.TimeoutExpired:
166
+
167
+ except KeyboardInterrupt:
168
+ if not p:
169
+ raise
170
+ try:
171
+ terminate(p.pid)
172
+ p.wait(10)
173
+ except subprocess.TimeoutExpired:
174
+ kill(p.pid)
175
+ utils.call_and_catch(p.wait, 10)
176
+ raise
177
+
178
+ except (subprocess.TimeoutExpired, JoltTimeoutError):
129
179
  timedout = True
130
180
  try:
131
181
  terminate(p.pid)
132
182
  p.wait(10)
133
183
  except subprocess.TimeoutExpired:
134
184
  kill(p.pid)
135
- p.wait()
185
+ utils.call_and_catch(p.wait, 10)
186
+
136
187
  finally:
137
- stdout.join()
138
- stderr.join()
139
- p.stdin.close()
140
- p.stdout.close()
141
- p.stderr.close()
188
+ if stdout:
189
+ stdout.join()
190
+ if stderr:
191
+ stderr.join()
192
+ if p:
193
+ p.stdin.close()
194
+ p.stdout.close()
195
+ p.stderr.close()
142
196
 
143
197
  if p.returncode != 0 and output_on_error:
144
198
  for reader, line in logbuf:
@@ -157,11 +211,16 @@ def _run(cmd, cwd, env, preexec_fn, *args, **kwargs):
157
211
 
158
212
  if p.returncode != 0:
159
213
  stderrbuf = [line for reader, line in logbuf if reader is stderr]
160
- raise JoltCommandError(
161
- "Command {0}: {1}".format(
162
- "timeout" if timedout else "failed",
163
- " ".join(cmd) if type(cmd) == list else cmd.format(*args, **kwargs)),
164
- stdoutbuf, stderrbuf, p.returncode)
214
+ if timedout:
215
+ raise JoltTimeoutError(
216
+ "Command timeout: " + (" ".join(cmd) if type(cmd) is list else cmd))
217
+ else:
218
+ raise JoltCommandError(
219
+ "Command failed: " + (" ".join(cmd) if type(cmd) is list else cmd),
220
+ stdoutbuf, stderrbuf, p.returncode)
221
+ if return_stderr:
222
+ return "\n".join(stdoutbuf) if output_rstrip else "".join(stdoutbuf), \
223
+ "\n".join(stderrbuf) if output_rstrip else "".join(stderrbuf)
165
224
  return "\n".join(stdoutbuf) if output_rstrip else "".join(stdoutbuf)
166
225
 
167
226
 
@@ -195,36 +254,6 @@ class _String(object):
195
254
  return self._str.startswith(substr)
196
255
 
197
256
 
198
- class _tmpdir(object):
199
- def __init__(self, name, cwd=None):
200
- self._name = name
201
- self._path = None
202
- self._cwd = cwd or os.getcwd()
203
-
204
- def __enter__(self):
205
- try:
206
- dirname = self._cwd
207
- fs.makedirs(fs.path.join(dirname, fs.path.dirname(self._name)))
208
- self._path = fs.mkdtemp(prefix=self._name + "-", dir=dirname)
209
- except KeyboardInterrupt as e:
210
- raise e
211
- except Exception as e:
212
- raise e
213
- raise_error_if(not self._path, "failed to create temporary directory")
214
- return self
215
-
216
- def __exit__(self, type, value, tb):
217
- if self._path:
218
- fs.rmtree(self._path, ignore_errors=True)
219
-
220
- @property
221
- def path(self):
222
- return self.get_path()
223
-
224
- def get_path(self):
225
- return self._path
226
-
227
-
228
257
  class _CMake(object):
229
258
  def __init__(self, deps, tools, incremental=False):
230
259
  self.deps = deps
@@ -232,6 +261,10 @@ class _CMake(object):
232
261
  self.builddir = self.tools.builddir(incremental=incremental)
233
262
  self.installdir = self.tools.builddir("install", incremental=False)
234
263
 
264
+ def clean(self):
265
+ self.tools.rmtree(self.builddir, ignore_errors=True)
266
+ self.tools.rmtree(self.installdir, ignore_errors=True)
267
+
235
268
  def configure(self, sourcedir, *args, generator=None, **kwargs):
236
269
  sourcedir = self.tools.expand_path(sourcedir)
237
270
 
@@ -242,81 +275,85 @@ class _CMake(object):
242
275
 
243
276
  with self.tools.cwd(self.builddir):
244
277
  self.tools.run(
245
- "cmake {0} -B {1} -DCMAKE_INSTALL_PREFIX={2} {3} {4}",
278
+ "cmake {0} {1} -DCMAKE_INSTALL_PREFIX=/jolt-prefix {1} {2} {3}",
246
279
  sourcedir,
247
- self.builddir,
248
- self.installdir,
280
+ utils.option("-B", self.builddir),
249
281
  utils.option("-G", generator),
250
282
  extra_args,
251
283
  output=True)
252
284
 
253
- def build(self, release=True, *args, **kwargs):
254
- threading_args = ''
255
- try:
256
- threading_args = ' -j {}'.format(kwargs.get("threads", self.tools.thread_count())) \
257
- if "--parallel" in self.tools.run("cmake --help-manual cmake 2>&1", output=False) \
258
- else ''
259
- except Exception:
260
- pass
261
-
285
+ def build(self, *args, config="Release", **kwargs):
286
+ threading_args = ' -j {}'.format(kwargs.get("threads", self.tools.thread_count()))
262
287
  with self.tools.cwd(self.builddir):
263
- release = "--config Release" if release else ""
264
- self.tools.run("cmake --build . {0}{1}", release, threading_args, output=True)
288
+ self.tools.run("cmake --build . --config {0} {1}", config, threading_args, output=True)
265
289
 
266
- def install(self, release=True, *args, **kwargs):
267
- with self.tools.cwd(self.builddir):
268
- release = "--config Release" if release else ""
269
- self.tools.run("cmake --build . --target install {0}", release, output=True)
290
+ def install(self, target="install", config="Release", **kwargs):
291
+ with self.tools.cwd(self.builddir), self.tools.environ(DESTDIR=self.installdir):
292
+ self.tools.run("cmake --build . --config {0} --target {1}", config, target, output=True)
270
293
 
271
- def publish(self, artifact, files='*', *args, **kwargs):
272
- with self.tools.cwd(self.installdir):
273
- artifact.collect(files, *args, **kwargs)
294
+ def publish(self, artifact, files='*', symlinks=True, *args, **kwargs):
295
+ with self.tools.cwd(self.installdir, "jolt-prefix"):
296
+ artifact.collect(files, *args, symlinks=symlinks, **kwargs)
297
+ artifact.strings.install_prefix = "/jolt-prefix"
274
298
 
275
299
 
276
300
  class _Meson(object):
277
- def __init__(self, deps, tools):
301
+ def __init__(self, deps, tools, incremental=False):
278
302
  self.deps = deps
279
303
  self.tools = tools
280
- self.builddir = self.tools.builddir()
281
- self.installdir = self.tools.builddir("install")
304
+ self.builddir = self.tools.builddir(incremental=incremental)
305
+ self.installdir = self.tools.builddir("install", incremental=False)
306
+ self.prefix = "/jolt-prefix" if os.name != "nt" else "C:\\jolt-prefix"
307
+
308
+ def clean(self):
309
+ self.tools.rmtree(self.builddir, ignore_errors=True)
310
+ self.tools.rmtree(self.installdir, ignore_errors=True)
282
311
 
283
312
  def configure(self, sourcedir, *args, **kwargs):
284
313
  sourcedir = self.tools.expand_path(sourcedir)
285
- self.tools.run("meson --prefix=/ {0} {1}", sourcedir, self.builddir,
314
+ options = " ".join([f"-D{arg}" for arg in args]) + " "
315
+ options += " ".join(["-D{0}={1}".format(key, self.tools.expand(val)) for key, val in kwargs.items()])
316
+ self.tools.run("meson setup --prefix={0} {1} {2} {3}", self.prefix, sourcedir, self.builddir, options,
286
317
  output=True)
287
318
 
288
319
  def build(self, *args, **kwargs):
289
320
  self.tools.run("ninja -C {0} ", self.builddir, output=True)
290
321
 
291
322
  def install(self, *args, **kwargs):
292
- self.tools.run("DESTDIR={0} ninja -C {1} install",
293
- self.installdir, self.builddir,
294
- output=True)
323
+ with self.tools.environ(DESTDIR=self.installdir):
324
+ self.tools.run("ninja -C {0} install", self.builddir, output=True)
295
325
 
296
- def publish(self, artifact, files='*', *args, **kwargs):
297
- with self.tools.cwd(self.installdir):
298
- artifact.collect(files, *args, **kwargs)
326
+ def publish(self, artifact, files='*', symlinks=True, *args, **kwargs):
327
+ with self.tools.cwd(self.installdir, "jolt-prefix"):
328
+ artifact.collect(files, *args, symlinks=symlinks, **kwargs)
329
+ artifact.strings.install_prefix = self.prefix
299
330
 
300
331
 
301
332
  class _AutoTools(object):
302
- def __init__(self, deps, tools):
333
+ def __init__(self, deps, tools, incremental=False):
303
334
  self.deps = deps
304
335
  self.tools = tools
305
- self.builddir = self.tools.builddir()
306
- self.installdir = self.tools.builddir("install")
336
+ self.builddir = self.tools.builddir(incremental=incremental)
337
+ self.installdir = self.tools.builddir("install", incremental=False)
338
+ self.prefix = "jolt-prefix"
307
339
 
308
- def configure(self, sourcedir, *args, **kwargs):
340
+ def clean(self):
341
+ self.tools.rmtree(self.builddir, ignore_errors=True)
342
+ self.tools.rmtree(self.installdir, ignore_errors=True)
343
+
344
+ def configure(self, sourcedir, *args):
309
345
  sourcedir = self.tools.expand_path(sourcedir)
310
- prefix = kwargs.get("prefix", "/")
311
346
 
312
347
  if not fs.path.exists(fs.path.join(sourcedir, "configure")):
313
348
  with self.tools.cwd(sourcedir):
314
349
  self.tools.run("autoreconf -visf", output=True)
315
350
 
316
351
  with self.tools.cwd(self.builddir), self.tools.environ(DESTDIR=self.installdir):
317
- self.tools.run("{0}/configure --prefix={1} {2}",
318
- sourcedir, prefix,
319
- self.tools.getenv("CONFIGURE_FLAGS"),
352
+ self.tools.run("{0}/configure --prefix=/{1} {2} {3}",
353
+ sourcedir,
354
+ self.prefix,
355
+ self.tools.getenv("CONFIGURE_FLAGS", ""),
356
+ " ".join(args),
320
357
  output=True)
321
358
 
322
359
  def build(self, *args, **kwargs):
@@ -324,13 +361,14 @@ class _AutoTools(object):
324
361
  self.tools.run("make VERBOSE=yes Q= V=1 -j{0}",
325
362
  self.tools.cpu_count(), output=True)
326
363
 
327
- def install(self, target="install", **kwargs):
328
- with self.tools.cwd(self.builddir), self.tools.environ(DESTDIR=self.installdir):
329
- self.tools.run("make {}", target, output=True)
364
+ def install(self, target="install"):
365
+ with self.tools.cwd(self.builddir):
366
+ self.tools.run("make DESTDIR={} {}", self.installdir, target, output=True)
330
367
 
331
- def publish(self, artifact, files='*', *args, **kwargs):
332
- with self.tools.cwd(self.installdir):
333
- artifact.collect(files, *args, **kwargs)
368
+ def publish(self, artifact, files='*', symlinks=True, *args, **kwargs):
369
+ with self.tools.cwd(self.installdir, self.prefix):
370
+ artifact.collect(files, *args, symlinks=symlinks, **kwargs)
371
+ artifact.strings.install_prefix = "/" + self.prefix
334
372
 
335
373
 
336
374
  class ZipFile(zipfile.ZipFile):
@@ -375,11 +413,18 @@ class JinjaTaskContext(Context):
375
413
  Attempts to resolves any missing keywords by looking up task class attributes.
376
414
  """
377
415
  def resolve_or_missing(self, key):
416
+ if key in self.vars:
417
+ return self.vars[key]
418
+
419
+ if key in self.parent:
420
+ return self.parent[key]
421
+
378
422
  if key != "task":
379
423
  task = self.get("task")
380
424
  if task and hasattr(task, key):
381
425
  return getattr(task, key)
382
- return super(JinjaTaskContext, self).resolve_or_missing(key)
426
+
427
+ return missing
383
428
 
384
429
 
385
430
  class Namespace(object):
@@ -450,8 +495,10 @@ class Tools(object):
450
495
 
451
496
  def __init__(self, task=None, cwd=None, env=None):
452
497
  self._chroot = None
498
+ self._chroot_prefix = []
499
+ self._chroot_path = []
500
+ self._deadline = None
453
501
  self._run_prefix = []
454
- self._preexec_fn = None
455
502
  self._cwd = fs.path.normpath(fs.path.join(config.get_workdir(), cwd or config.get_workdir()))
456
503
  self._env = copy.deepcopy(env or os.environ)
457
504
  self._task = task
@@ -500,24 +547,50 @@ class Tools(object):
500
547
  zf.write(path, zippath)
501
548
  return filename
502
549
 
550
+ def _make_7zfile(self, filename, fmt, rootdir):
551
+ self.mkdirname(filename)
552
+ with py7zr.SevenZipFile(filename, 'w') as archive:
553
+ archive.writeall(rootdir, ".")
554
+ return filename
555
+
503
556
  def _make_tarfile(self, filename, fmt, rootdir):
504
- dirname = os.path.dirname(filename)
505
- if not os.path.exists(dirname):
506
- fs.makedirs(dirname)
557
+ self.mkdirname(filename)
507
558
  with tarfile.open(filename, 'w|%s' % fmt) as tar:
508
559
  tar.add(rootdir, ".")
509
560
  return filename
510
561
 
562
+ def _make_tarzstd(self, filename, rootdir):
563
+ self.mkdirname(filename)
564
+ with open(filename, 'wb') as zstd_file:
565
+ compressor = zstandard.ZstdCompressor(threads=self.thread_count())
566
+ with compressor.stream_writer(zstd_file) as stream:
567
+ with tarfile.open(mode="w|", fileobj=stream) as tar:
568
+ tar.add(rootdir, ".")
569
+ return filename
570
+
571
+ def _extract_tarzstd(self, filename, pathname, files=None):
572
+ with open(filename, 'rb') as zstd_file:
573
+ decompressor = zstandard.ZstdDecompressor()
574
+ with decompressor.stream_reader(zstd_file) as stream:
575
+ with tarfile.open(mode="r|", fileobj=stream) as tar:
576
+ if files:
577
+ for file in files:
578
+ tar.extract(file, pathname)
579
+ else:
580
+ tar.extractall(pathname)
581
+
511
582
  def archive(self, pathname, filename):
512
583
  """ Creates a (compressed) archive.
513
584
 
514
585
  The type of archive to create is determined by the filename extension.
515
586
  Supported formats are:
516
587
 
588
+ - 7z
517
589
  - tar
518
590
  - tar.bz2
519
591
  - tar.gz
520
592
  - tar.xz
593
+ - tar.zst
521
594
  - zip
522
595
 
523
596
  Args:
@@ -533,12 +606,14 @@ class Tools(object):
533
606
  elif filename.endswith(".tar"):
534
607
  fmt = "tar"
535
608
  elif filename.endswith(".tar.gz"):
536
- if shutil.which("tar") and shutil.which("pigz"):
609
+ if self.which("tar") and self.which("pigz"):
537
610
  self.run("tar -I pigz -cf {} -C {} .", filename, pathname)
538
611
  return filename
539
612
  fmt = "targz"
613
+ elif filename.endswith(".tar.zst"):
614
+ return self._make_tarzstd(filename, rootdir=pathname)
540
615
  elif filename.endswith(".tgz"):
541
- if shutil.which("tar") and shutil.which("pigz"):
616
+ if self.which("tar") and self.which("pigz"):
542
617
  self.run("tar -I pigz -cf {} -C {} .", filename, pathname)
543
618
  return filename
544
619
  fmt = "targz"
@@ -546,12 +621,16 @@ class Tools(object):
546
621
  fmt = "tarbz2"
547
622
  elif filename.endswith(".tar.xz"):
548
623
  fmt = "tarxz"
624
+ elif filename.endswith(".7z"):
625
+ fmt = "7z"
549
626
  raise_task_error_if(
550
627
  not fmt, self._task,
551
628
  "unknown archive type '{0}'", fs.path.basename(filename))
552
629
  try:
553
630
  if fmt == "zip":
554
631
  outfile = self._make_zipfile(filename, fmt, rootdir=pathname)
632
+ elif fmt == "7z":
633
+ outfile = self._make_7zfile(filename, fmt, rootdir=pathname)
555
634
  else:
556
635
  outfile = self._make_tarfile(filename, fmt[3:], rootdir=pathname)
557
636
  if outfile != filename:
@@ -560,9 +639,9 @@ class Tools(object):
560
639
  except Exception:
561
640
  raise_task_error(self._task, "failed to create archive from directory '{0}'", pathname)
562
641
 
563
- def autotools(self, deps=None):
642
+ def autotools(self, deps=None, incremental=False):
564
643
  """ Creates an AutoTools invokation helper """
565
- return _AutoTools(deps, self)
644
+ return _AutoTools(deps, self, incremental=incremental)
566
645
 
567
646
  @utils.locked(lock='_builddir_lock')
568
647
  def builddir(self, name=None, incremental=False, unique=True):
@@ -589,8 +668,16 @@ class Tools(object):
589
668
  name += "-" + utils.canonical(self._task.short_qualified_name)
590
669
 
591
670
  dirname = fs.path.join(self.getcwd(), name)
671
+ if incremental:
672
+ dirname += "-inc"
673
+
674
+ # Check if incremental build directories are disabled in the configuration
675
+ if incremental not in ["always"] and not config.is_incremental_build():
676
+ incremental = False
592
677
 
593
678
  if incremental:
679
+ # Create a unique build directory for each task
680
+ # and store the task name in a hidden file.
594
681
  if self._task is not None and unique:
595
682
  meta_task = fs.path.join(dirname, ".task")
596
683
  if not fs.path.exists(meta_task) \
@@ -598,6 +685,7 @@ class Tools(object):
598
685
  fs.rmtree(dirname, ignore_errors=True)
599
686
  fs.makedirs(dirname)
600
687
 
688
+ # Remove the build directory if the task taint has changed (--force or --salt)
601
689
  if self._task.taint is not None:
602
690
  meta = fs.path.join(dirname, ".taint")
603
691
  if not fs.path.exists(meta) or self.read_file(meta) != str(self._task.taint):
@@ -611,18 +699,16 @@ class Tools(object):
611
699
  return dirname
612
700
 
613
701
  if name not in self._builddir:
614
- fs.makedirs(fs.path.dirname(dirname))
615
- self._builddir[name] = fs.mkdtemp(
616
- prefix=fs.path.basename(dirname) + "-",
617
- dir=fs.path.dirname(dirname))
702
+ fs.makedirs(dirname)
703
+ self._builddir[name] = dirname
618
704
 
619
705
  return self._builddir[name]
620
706
 
621
707
  @property
622
708
  def buildroot(self):
623
709
  """ Return the root path of all build directories """
624
- from jolt.loader import get_workspacedir
625
- return fs.path.normpath(fs.path.join(get_workspacedir(), "build"))
710
+ from jolt.loader import JoltLoader
711
+ return fs.path.normpath(JoltLoader.get().build_path)
626
712
 
627
713
  def checksum_file(self, filelist, concat=False, hashfn=hashlib.sha1, filterfn=None):
628
714
  """ Calculate a checksum of one or multiple files.
@@ -654,7 +740,7 @@ class Tools(object):
654
740
  if not concat:
655
741
  checksum = hashfn()
656
742
 
657
- return result[-1] if concat or type(filelist) == str else result
743
+ return result[-1] if concat or type(filelist) is str else result
658
744
 
659
745
  def chmod(self, pathname, mode):
660
746
  """ Changes permissions of files and directories.
@@ -679,6 +765,7 @@ class Tools(object):
679
765
  - .bz2
680
766
  - .gz
681
767
  - .xz
768
+ - .zst
682
769
 
683
770
  Args:
684
771
  src (str): Source file to be compressed.
@@ -695,7 +782,7 @@ class Tools(object):
695
782
  for block in iter(lambda: infp.read(0x10000), b''):
696
783
  outfp.write(block)
697
784
  elif ext == "gz":
698
- if shutil.which("pigz"):
785
+ if self.which("pigz"):
699
786
  return self.run("pigz -p {} {}", self.thread_count(), src)
700
787
  with open(src, 'rb') as infp:
701
788
  with gzip.open(dst, 'wb') as outfp:
@@ -706,6 +793,13 @@ class Tools(object):
706
793
  with lzma.open(dst, 'wb') as outfp:
707
794
  for block in iter(lambda: infp.read(0x10000), b''):
708
795
  outfp.write(block)
796
+ elif ext == "zst":
797
+ with open(src, 'rb') as infp:
798
+ with open(dst, 'wb') as outfp:
799
+ compressor = zstandard.ZstdCompressor(threads=self.thread_count())
800
+ with compressor.stream_writer(outfp) as stream:
801
+ for block in iter(lambda: infp.read(0x10000), b''):
802
+ stream.write(block)
709
803
 
710
804
  def copy(self, src, dst, symlinks=False):
711
805
  """ Copies file and directories (recursively).
@@ -774,10 +868,19 @@ class Tools(object):
774
868
  finally:
775
869
  self._cwd = prev
776
870
 
777
- def download(self, url, pathname, exceptions=True, **kwargs):
871
+ def download(self, url, pathname, exceptions=True, auth=None, **kwargs):
778
872
  """
779
873
  Downloads a file using HTTP.
780
874
 
875
+ Automatically expands any {keyword} arguments in the URL and pathname.
876
+
877
+ Basic authentication is supported by including the credentials in the URL.
878
+ Environment variables can be used to hide sensitive information. Specify
879
+ the environment variable name in the URI as e.g.
880
+ ``http://{environ[USER]}:{environ[PASS]}@host``.
881
+ Alternatively, the auth parameter can be used to provide an authentication
882
+ object that is passed to the requests.get() function.
883
+
781
884
  Throws a JoltError exception on failure.
782
885
 
783
886
  Args:
@@ -790,17 +893,32 @@ class Tools(object):
790
893
 
791
894
  url = self.expand(url)
792
895
  pathname = self.expand_path(pathname)
793
- try:
794
- from requests.api import get
795
896
 
796
- response = get(url, stream=True, **kwargs)
897
+ url_parsed = urlparse(url)
898
+ raise_task_error_if(
899
+ not url_parsed.scheme or not url_parsed.netloc,
900
+ self._task,
901
+ "Invalid URL: '{}'", url)
902
+
903
+ if auth is None and url_parsed.username and url_parsed.password:
904
+ auth = HTTPBasicAuth(url_parsed.username, url_parsed.password)
905
+
906
+ # Redact password from URL if present
907
+ if url_parsed.password:
908
+ url_parsed = url_parsed._replace(netloc=url_parsed.netloc.replace(url_parsed.password, "****"))
909
+
910
+ url_cleaned = urlunparse(url_parsed)
911
+
912
+ try:
913
+ response = http_session.get(url, stream=True, auth=auth, **kwargs)
797
914
  raise_error_if(
798
915
  exceptions and response.status_code not in [200],
799
- f"Download from '{url}' failed with status '{response.status_code}'")
916
+ f"Download from '{url_cleaned}' failed with status '{response.status_code}'")
800
917
 
801
918
  name = fs.path.basename(pathname)
802
919
  size = int(response.headers.get('content-length', 0))
803
- with log.progress("Downloading {0}".format(name), size, "B") as pbar:
920
+ with log.progress("Downloading {0}".format(utils.shorten(name)), size, "B") as pbar:
921
+ log.verbose("{} -> {}", url_cleaned, pathname)
804
922
  with open(pathname, 'wb') as out_file:
805
923
  chunk_size = 4096
806
924
  for data in response.iter_content(chunk_size=chunk_size):
@@ -808,7 +926,7 @@ class Tools(object):
808
926
  pbar.update(len(data))
809
927
  actual_size = self.file_size(pathname)
810
928
  raise_error_if(
811
- size != 0 and size != actual_size,
929
+ size != 0 and size > actual_size,
812
930
  f"Downloaded file was truncated to {actual_size}/{size} bytes: {name}")
813
931
 
814
932
  return response.status_code == 200
@@ -837,16 +955,29 @@ class Tools(object):
837
955
  with tools.environ(CC="clang"):
838
956
  tools.run("make all")
839
957
  """
840
- for key, value in kwargs.items():
841
- kwargs[key] = self.expand(value)
842
-
843
958
  restore = {key: value for key, value in self._env.items()}
844
- self._env.update(kwargs)
845
- yield self._env
959
+
846
960
  for key, value in kwargs.items():
847
- if key not in restore:
848
- del self._env[key]
849
- self._env.update(restore)
961
+ if value is not None:
962
+ self._env[key] = self.expand(value)
963
+ else:
964
+ self._env.pop(key, None)
965
+
966
+ try:
967
+ yield self._env
968
+ finally:
969
+ self._env = restore
970
+
971
+ def exists(self, pathname):
972
+ """ Checks if a file or directory exists.
973
+
974
+ Args:
975
+ pathname (str): Path to file or directory.
976
+
977
+ Returns:
978
+ bool: True if the file or directory exists, False otherwise.
979
+ """
980
+ return fs.path.exists(self.expand_path(pathname))
850
981
 
851
982
  def expand(self, string, *args, **kwargs):
852
983
  """ Expands keyword arguments/macros in a format string.
@@ -907,6 +1038,9 @@ class Tools(object):
907
1038
  str: Expanded string.
908
1039
  """
909
1040
 
1041
+ if type(pathname) is list:
1042
+ return [self.expand_path(path) for path in pathname]
1043
+
910
1044
  path = fs.path.join(self.getcwd(), self.expand(pathname, *args, **kwargs))
911
1045
  # Ensure to retain any trailing path separator which is used as
912
1046
  # indicator of directory paths
@@ -936,8 +1070,10 @@ class Tools(object):
936
1070
  str: Expanded string.
937
1071
  """
938
1072
 
1073
+ if not relpath:
1074
+ relpath = self._task.joltdir if self._task else self.getcwd()
939
1075
  pathname = self.expand(pathname, *args, **kwargs)
940
- relpath = self.expand(relpath or self._task.joltdir, *args, **kwargs)
1076
+ relpath = self.expand(relpath, *args, **kwargs)
941
1077
  pathname = fs.path.join(self.getcwd(), pathname)
942
1078
  # Ensure to retain any trailing path separator which is used as
943
1079
  # indicator of directory paths
@@ -949,10 +1085,12 @@ class Tools(object):
949
1085
 
950
1086
  Supported formats are:
951
1087
 
1088
+ - 7z
952
1089
  - tar
953
1090
  - tar.bz2
954
1091
  - tar.gz
955
1092
  - tar.xz
1093
+ - tar.zst
956
1094
  - zip
957
1095
 
958
1096
  Args:
@@ -979,7 +1117,7 @@ class Tools(object):
979
1117
  else:
980
1118
  tar.extractall(filepath)
981
1119
  elif filename.endswith(".tar.gz") or filename.endswith(".tgz"):
982
- if shutil.which("tar") and shutil.which("pigz"):
1120
+ if self.which("tar") and self.which("pigz"):
983
1121
  self.run("tar -I pigz {} -xf {} -C {} {}",
984
1122
  ignore_owner_tar, filename, filepath,
985
1123
  " ".join(files) if files else "")
@@ -1006,6 +1144,18 @@ class Tools(object):
1006
1144
  tar.extract(file, filepath)
1007
1145
  else:
1008
1146
  tar.extractall(filepath)
1147
+ elif filename.endswith(".tar.zst"):
1148
+ try:
1149
+ self._extract_tarzstd(filename, filepath, files)
1150
+ except tarfile.StreamError as e:
1151
+ raise_task_error(self._task, "failed to extract archive '{0}': {1}", filename, str(e))
1152
+ elif filename.endswith(".7z"):
1153
+ with py7zr.SevenZipFile(filename, 'r') as archive:
1154
+ if files:
1155
+ for file in files:
1156
+ archive.extract(file, filepath)
1157
+ else:
1158
+ archive.extractall(filepath)
1009
1159
  else:
1010
1160
  raise_task_error(self._task, "unknown archive type '{0}'", fs.path.basename(filename))
1011
1161
  except Exception:
@@ -1035,7 +1185,7 @@ class Tools(object):
1035
1185
  """ Returns the current working directory. """
1036
1186
  return fs.path.normpath(self._cwd)
1037
1187
 
1038
- def getenv(self, key, default=""):
1188
+ def getenv(self, key, default=None):
1039
1189
  """ Returns the value of an environment variable.
1040
1190
 
1041
1191
  Only child processes spawned by the same tools object can see
@@ -1072,7 +1222,19 @@ class Tools(object):
1072
1222
  files = [self.expand_path(file) for file in files]
1073
1223
  elif not fs.path.isabs(pathname):
1074
1224
  files = [self.expand_relpath(file, self.getcwd()) for file in files]
1075
- return files
1225
+ return list(sorted(files))
1226
+
1227
+ def isdir(self, pathname):
1228
+ """ Determines if a path is a directory.
1229
+
1230
+ Args:
1231
+ pathname (str): Path to a file or directory.
1232
+
1233
+ Returns:
1234
+ boolean: True if the path is a directory, False otherwise.
1235
+ """
1236
+ pathname = self.expand_path(pathname)
1237
+ return fs.path.isdir(pathname)
1076
1238
 
1077
1239
  def mkdir(self, pathname, recursively=True):
1078
1240
  """ Create directory. """
@@ -1083,6 +1245,33 @@ class Tools(object):
1083
1245
  else:
1084
1246
  fs.mkdir(pathname)
1085
1247
 
1248
+ def mkdirname(self, pathname, recursively=True):
1249
+ """ Create parent directory. """
1250
+
1251
+ pathname = self.expand_path(pathname)
1252
+ pathname = fs.path.dirname(pathname)
1253
+ if pathname:
1254
+ self.mkdir(pathname, recursively)
1255
+
1256
+ def move(self, src, dst):
1257
+ """
1258
+ Move/rename file.
1259
+
1260
+ Args:
1261
+ src (str): Path to a file or directory to be moved.
1262
+ dest (str): Destination path. If the destination is
1263
+ an existing directory, then src is moved inside
1264
+ that directory. If the destination already exists
1265
+ but is not a directory, it may be overwritten.
1266
+ If the destination is not on the same filesystem, the
1267
+ source file or directory is copied to the destination
1268
+ and then removed.
1269
+ """
1270
+
1271
+ src = self.expand_path(src)
1272
+ dst = self.expand_path(dst)
1273
+ return shutil.move(src, dst)
1274
+
1086
1275
  def map_consecutive(self, callable, iterable):
1087
1276
  """ Same as ``map()``. """
1088
1277
  return utils.map_consecutive(callable, iterable)
@@ -1118,9 +1307,80 @@ class Tools(object):
1118
1307
  """
1119
1308
  return utils.map_concurrent(callable, iterable, max_workers)
1120
1309
 
1121
- def meson(self, deps=None):
1310
+ def meson(self, deps=None, incremental=False):
1122
1311
  """ Creates a Meson invokation helper """
1123
- return _Meson(deps, self)
1312
+ return _Meson(deps, self, incremental=incremental)
1313
+
1314
+ @contextmanager
1315
+ def nixpkgs(self, nixfile=None, packages=None, pure=False, path=None, options=None):
1316
+ """
1317
+ Creates a Nix environment with the specified packages.
1318
+
1319
+ Args:
1320
+ nixfile (str): Path to a Nix expression file.
1321
+ packages (list): List of Nix packages to include in environment.
1322
+ pure (boolean): Create a pure environment.
1323
+ path (list): List of Nix expression paths.
1324
+ options (dict): Nix configuration options.
1325
+
1326
+ Example:
1327
+
1328
+ .. code-block:: python
1329
+
1330
+ def run(self, deps, tools):
1331
+ with tools.nixpkgs(packages=["gcc13"]):
1332
+ tools.run("gcc --version")
1333
+
1334
+ """
1335
+
1336
+ # Check if Nix is available
1337
+ raise_task_error_if(
1338
+ not self.which("nix-shell"),
1339
+ self._task,
1340
+ "Nix not available on this system")
1341
+
1342
+ nixfile = self.expand_path(nixfile) if nixfile else ""
1343
+ pathflags = " ".join([f"-I {path}" for path in path or []])
1344
+ options = " ".join([f"--option {k} {v}" for k, v in (options or {}).items()])
1345
+ pureflag = "--pure" if pure else ""
1346
+ packages = "-p " + " ".join(packages) if packages else ""
1347
+
1348
+ # Expand all placeholders
1349
+ options = self.expand(options)
1350
+ packages = self.expand(packages)
1351
+ pathflags = self.expand(pathflags)
1352
+
1353
+ # Use cached-nix-shell is available
1354
+ nixshell = "cached-nix-shell" if self.which("cached-nix-shell") else "nix-shell"
1355
+
1356
+ # Run nix-shell to stage packages and environment
1357
+ env = self.run(
1358
+ "{} {} {} {} --run 'env -0' {}",
1359
+ nixshell,
1360
+ pathflags,
1361
+ pureflag,
1362
+ packages,
1363
+ nixfile,
1364
+ output_on_error=True)
1365
+ env = env.strip().strip("\x00")
1366
+ env = dict(line.split("=", 1) for line in env.split('\x00'))
1367
+
1368
+ # Add host path first to environment PATH
1369
+ host_path = env.get("HOST_PATH", None)
1370
+ if host_path:
1371
+ env["PATH"] = host_path + os.pathsep + env["PATH"]
1372
+
1373
+ # Enter the environment
1374
+ old_env = self._env
1375
+ try:
1376
+ if pure:
1377
+ self._env = env
1378
+ else:
1379
+ self._env = copy.deepcopy(env)
1380
+ self._env.update(env)
1381
+ yield
1382
+ finally:
1383
+ self._env = old_env
1124
1384
 
1125
1385
  def render(self, template, **kwargs):
1126
1386
  """ Render a Jinja template string.
@@ -1134,14 +1394,20 @@ class Tools(object):
1134
1394
  str: Renderered template data.
1135
1395
 
1136
1396
  """
1137
- env = Environment(
1138
- loader=FileSystemLoader(self.getcwd()),
1139
- autoescape=select_autoescape(),
1140
- trim_blocks=True,
1141
- lstrip_blocks=True)
1142
- env.context_class = JinjaTaskContext
1143
- tmpl = env.from_string(template)
1144
- return tmpl.render(task=self._task, tools=self, **kwargs)
1397
+ try:
1398
+ env = Environment(
1399
+ loader=FileSystemLoader(self.getcwd()),
1400
+ autoescape=False,
1401
+ trim_blocks=True,
1402
+ lstrip_blocks=True)
1403
+ env.context_class = JinjaTaskContext
1404
+ env.filters["prefix"] = utils.prefix
1405
+ env.filters["suffix"] = utils.suffix
1406
+ tmpl = env.from_string(template)
1407
+ return tmpl.render(task=self._task, tools=self, **kwargs)
1408
+ except TemplateError as e:
1409
+ log.debug("Template error: {}", template)
1410
+ raise_task_error(self._task, "Template error: {}", e)
1145
1411
 
1146
1412
  def render_file(self, template, **kwargs):
1147
1413
  """ Render a Jinja template file.
@@ -1155,22 +1421,29 @@ class Tools(object):
1155
1421
  str: Renderered template data.
1156
1422
 
1157
1423
  """
1158
- env = Environment(
1159
- loader=FileSystemLoader(self.getcwd()),
1160
- autoescape=select_autoescape(),
1161
- trim_blocks=True,
1162
- lstrip_blocks=True)
1163
- env.context_class = JinjaTaskContext
1164
- tmpl = env.get_template(self.expand(template))
1165
- return tmpl.render(task=self._task, tools=self, **kwargs)
1166
-
1167
- def replace_in_file(self, pathname, search, replace):
1424
+ try:
1425
+ env = Environment(
1426
+ loader=FileSystemLoader(self.getcwd()),
1427
+ autoescape=False,
1428
+ trim_blocks=True,
1429
+ lstrip_blocks=True)
1430
+ env.context_class = JinjaTaskContext
1431
+ tmpl = env.get_template(self.expand(template))
1432
+ return tmpl.render(task=self._task, tools=self, **kwargs)
1433
+ except TemplateError as e:
1434
+ log.debug("Template error: {}", template)
1435
+ raise_task_error(self._task, "Template error: {}", e)
1436
+
1437
+ def replace_in_file(self, pathname, search, replace, regex=False):
1168
1438
  """ Replaces all occurrences of a substring in a file.
1169
1439
 
1170
1440
  Args:
1171
1441
  pathname (str): Name/path of file to modify.
1172
1442
  search (str): Substring to be replaced.
1173
1443
  replace (str): Replacement substring.
1444
+ regex (boolean): Interpret search parameter as
1445
+ a regular expression matching the string to
1446
+ be replaced.
1174
1447
 
1175
1448
  Example:
1176
1449
 
@@ -1187,13 +1460,16 @@ class Tools(object):
1187
1460
  try:
1188
1461
  with open(pathname, "rb") as f:
1189
1462
  data = f.read()
1190
- data = data.replace(search.encode(), replace.encode())
1463
+ if regex:
1464
+ data = re.sub(search.encode(), replace.encode(), data)
1465
+ else:
1466
+ data = data.replace(search.encode(), replace.encode())
1191
1467
  with open(pathname, "wb") as f:
1192
1468
  f.write(data)
1193
1469
  except KeyboardInterrupt as e:
1194
1470
  raise e
1195
1471
  except Exception:
1196
- raise_task_error(self._task, "failed to replace string in file '{0}'", pathname)
1472
+ raise_task_error(self._task, "Failed to replace string in file '{0}'", pathname)
1197
1473
 
1198
1474
  def rmtree(self, pathname, *args, **kwargs):
1199
1475
  """Removes a directory tree from disk.
@@ -1309,6 +1585,9 @@ class Tools(object):
1309
1585
  refuses to terminate, it will be killed after an additional
1310
1586
  10 seconds have passed. Default: None.
1311
1587
 
1588
+ Returns:
1589
+ str: stdout from command unless output=False
1590
+
1312
1591
  Example:
1313
1592
 
1314
1593
  .. code-block:: python
@@ -1320,7 +1599,22 @@ class Tools(object):
1320
1599
  tools.run("make {target} VERBOSE={verbose} JOBS={0}", tools.cpu_count())
1321
1600
 
1322
1601
  """
1602
+ kwargs.setdefault("shell", True)
1603
+
1604
+ # Append command prefix before expanding string
1605
+ if self._chroot_prefix or self._run_prefix:
1606
+ if type(cmd) is list:
1607
+ cmd = self._chroot_prefix + self._run_prefix + cmd
1608
+ else:
1609
+ cmd = " ".join(self._chroot_prefix + self._run_prefix) + " " + cmd
1610
+
1611
+ if self._deadline is not None:
1612
+ remaining = int(self._deadline - time.time() + 0.5)
1613
+ timeout = kwargs.get("timeout", remaining)
1614
+ kwargs["timeout"] = min(remaining, timeout)
1615
+
1323
1616
  cmd = self.expand(cmd, *args, **kwargs)
1617
+
1324
1618
  stdi, stdo, stde = None, None, None
1325
1619
  try:
1326
1620
  stdi, stdo, stde = None, None, None
@@ -1332,12 +1626,9 @@ class Tools(object):
1332
1626
  raise e
1333
1627
  except Exception:
1334
1628
  pass
1335
- if self._run_prefix:
1336
- if type(cmd) == list:
1337
- cmd = self._run_prefix + cmd
1338
- else:
1339
- cmd = " ".join(self._run_prefix) + " " + cmd
1340
- return _run(cmd, self._cwd, self._env, self._preexec_fn, *args, **kwargs)
1629
+
1630
+ return _run(cmd, self._cwd, self._env, *args, **kwargs)
1631
+
1341
1632
  finally:
1342
1633
  if stdi:
1343
1634
  termios.tcsetattr(sys.stdin.fileno(), termios.TCSANOW, stdi)
@@ -1377,7 +1668,7 @@ class Tools(object):
1377
1668
 
1378
1669
  """
1379
1670
  cmdprefix = self.expand(cmdprefix, *args, **kwargs)
1380
- if type(cmdprefix) == str:
1671
+ if type(cmdprefix) is str:
1381
1672
  cmdprefix = cmdprefix.split()
1382
1673
 
1383
1674
  old_prefix = copy.copy(self._run_prefix)
@@ -1416,10 +1707,10 @@ class Tools(object):
1416
1707
  """
1417
1708
 
1418
1709
  raise_error_if(
1419
- type(artifact) is not cache.Artifact,
1710
+ type(artifact) not in [cache.Artifact, cache.ArtifactToolsProxy],
1420
1711
  "non-artifact passed as argument to Tools.sandbox()")
1421
1712
 
1422
- suffix = utils.canonical(artifact.get_task().short_qualified_name)
1713
+ suffix = utils.canonical(artifact.task.short_qualified_name)
1423
1714
 
1424
1715
  if reflect:
1425
1716
  sandbox_name = "sandboxes-reflected/" + suffix
@@ -1449,8 +1740,16 @@ class Tools(object):
1449
1740
  fs.rmtree(path)
1450
1741
  fs.makedirs(path)
1451
1742
  for relsrcpath, reldstpath in artifact.files.items():
1452
- srcpath = fs.path.normpath(fs.path.join(artifact.get_task().joltdir, relsrcpath))
1743
+ srcpath = fs.path.normpath(fs.path.join(artifact.task.joltdir, relsrcpath))
1744
+ srcpath = self.expand_path(srcpath)
1453
1745
  dstpath = fs.path.normpath(fs.path.join(path, reldstpath))
1746
+ dstpath = self.expand_path(dstpath)
1747
+
1748
+ if dstpath != fs.path.realpath(dstpath):
1749
+ log.debug("Cannot symlink '{} -> {}', parent directory already symlinked",
1750
+ srcpath, dstpath)
1751
+ continue
1752
+
1454
1753
  if fs.path.isdir(dstpath):
1455
1754
  files = fs.scandir(srcpath)
1456
1755
  for file in files:
@@ -1460,7 +1759,8 @@ class Tools(object):
1460
1759
  self.symlink(srcpath, dstpath)
1461
1760
 
1462
1761
  # Restore missing srcfiles if they resided in a build directory
1463
- if srcpath.startswith(artifact.get_task().tools.buildroot) and \
1762
+ buildroot_abs = self.expand_path(artifact.tools.buildroot)
1763
+ if srcpath.startswith(buildroot_abs) and \
1464
1764
  not fs.path.exists(srcpath):
1465
1765
  fs.copy(fs.path.join(artifact.path, reldstpath), srcpath, symlinks=True)
1466
1766
  self.write_file(meta, artifact.path)
@@ -1507,7 +1807,41 @@ class Tools(object):
1507
1807
  fs.makedirs(dstdir)
1508
1808
  fs.symlink(src, dst)
1509
1809
 
1510
- def tmpdir(self, name):
1810
+ @contextmanager
1811
+ def timeout(self, seconds):
1812
+ """ Context manager to set a timeout for a block of code.
1813
+
1814
+ A TimeoutError exception is raised if the block of code does not
1815
+ complete within the specified time.
1816
+
1817
+ Args:
1818
+ seconds (int): Timeout in seconds.
1819
+
1820
+ Example:
1821
+
1822
+ .. code-block:: python
1823
+
1824
+ with tools.timeout(5):
1825
+ tools.run("sleep 10")
1826
+
1827
+ """
1828
+ if seconds is None:
1829
+ yield
1830
+ return
1831
+
1832
+ with utils.timeout(seconds, JoltTimeoutError):
1833
+ old_deadline = self._deadline
1834
+ try:
1835
+ if old_deadline is None:
1836
+ self._deadline = time.time() + seconds
1837
+ else:
1838
+ self._deadline = min(old_deadline, time.time() + seconds)
1839
+ yield
1840
+ finally:
1841
+ self._deadline = old_deadline
1842
+
1843
+ @contextmanager
1844
+ def tmpdir(self, name=None):
1511
1845
  """ Creates a temporary directory.
1512
1846
 
1513
1847
  The directory is only valid within a context and it is removed
@@ -1522,11 +1856,18 @@ class Tools(object):
1522
1856
 
1523
1857
  .. code-block:: python
1524
1858
 
1525
- with tools.tmpdir("temp") as tmp, tools.cwd(tmp.path):
1859
+ with tools.tmpdir() as tmp, tools.cwd(tmp):
1526
1860
  tools.write_file("tempfile", "tempdata")
1527
1861
 
1528
1862
  """
1529
- return _tmpdir(name, cwd=self._cwd)
1863
+ dirname = None
1864
+ try:
1865
+ self.mkdir(self.buildroot)
1866
+ dirname = fs.mkdtemp(prefix=(name or "tmpdir") + "-", dir=self.buildroot)
1867
+ yield fs.path.normpath(dirname)
1868
+ finally:
1869
+ if dirname:
1870
+ self.rmtree(dirname, ignore_errors=True)
1530
1871
 
1531
1872
  def unlink(self, pathname, *args, **kwargs):
1532
1873
  """Removes a file from disk.
@@ -1538,10 +1879,11 @@ class Tools(object):
1538
1879
 
1539
1880
  """
1540
1881
  pathname = self.expand_path(pathname, *args, **kwargs)
1541
- return fs.unlink(pathname)
1882
+ return fs.unlink(pathname, ignore_errors=kwargs.get("ignore_errors", False))
1542
1883
 
1543
1884
  @contextmanager
1544
- def chroot(self, chroot, *args, **kwargs):
1885
+ @utils.deprecated
1886
+ def chroot(self, chroot, *args, path=None, **kwargs):
1545
1887
  """
1546
1888
  Experimental: Use chroot as root filesystem when running commands.
1547
1889
 
@@ -1553,6 +1895,9 @@ class Tools(object):
1553
1895
  Args:
1554
1896
  chroot (str, artifact): Path to rootfs directory, or an artifact
1555
1897
  with a 'rootfs' metadata path (artifact.paths.rootfs).
1898
+ path (list): List of directory paths within the chroot to add to
1899
+ the PATH environment variable, e.g. ["/usr/bin", "/bin"].
1900
+ By default, the current PATH is used also within the chroot.
1556
1901
 
1557
1902
  Example:
1558
1903
 
@@ -1564,9 +1909,17 @@ class Tools(object):
1564
1909
  """
1565
1910
  raise_error_if(platform.system() != "Linux", "Tools.chroot() is only supported on Linux")
1566
1911
 
1567
- if type(chroot) == cache.Artifact:
1912
+ raise_task_error_if(
1913
+ not self.which("newuidmap") and not self.which("/usr/bin/newuidmap"), self._task,
1914
+ "No usable 'newuidmap' found in PATH")
1915
+
1916
+ raise_task_error_if(
1917
+ not self.which("newgidmap") and not self.which("/usr/bin/newuidmap"), self._task,
1918
+ "No usable 'newgidmap' found in PATH")
1919
+
1920
+ if type(chroot) in [cache.Artifact, cache.ArtifactToolsProxy]:
1568
1921
  raise_task_error_if(
1569
- not str(chroot.paths.rootfs), self._task,
1922
+ not chroot.paths.rootfs, self._task,
1570
1923
  "No 'rootfs' path in artifact")
1571
1924
  chroot = chroot.paths.rootfs
1572
1925
 
@@ -1575,6 +1928,8 @@ class Tools(object):
1575
1928
  not fs.path.exists(chroot) or not fs.path.isdir(chroot),
1576
1929
  self._task, "failed to change root to '{0}'", chroot)
1577
1930
 
1931
+ bind = []
1932
+
1578
1933
  mount_dev = kwargs.get("mount_dev", True)
1579
1934
  mount_etc = kwargs.get("mount_etc", True)
1580
1935
  mount_home = kwargs.get("mount_home", False)
@@ -1583,93 +1938,73 @@ class Tools(object):
         mount_cachedir = kwargs.get("mount_cachedir", True)
         mount_builddir = kwargs.get("mount_builddir", True)
         mount = kwargs.get("mount", [])
+        raise_task_error_if(
+            type(mount) is not list,
+            self._task, "Expected a list as mount argument to Tools.chroot()")
+        mount = [self.expand(m) for m in mount]
 
-        overlaydir = self.builddir("overlay")
-        overlayrootdir = fs.path.join(overlaydir, "root")
-        with self.cwd(overlaydir):
-            self.mkdir("root")
-            self.mkdir("work")
-            self.mkdir("uppr")
-        overlayopts = f"upperdir={overlaydir}/uppr,workdir={overlaydir}/work,lowerdir={chroot}"
-
-        def unshare_chroot(overlayrootdir):
-            uid = os.geteuid()
-            gid = os.geteuid()
-            self._unshare([(uid, uid, 1)], [(gid, gid, 1)])
-
-            from ctypes import CDLL, c_char_p
-            libc = CDLL("libc.so.6")
-
-            MS_BIND = 4096
-            MS_REC = 16384
-
-            def mount_overlay():
-                return libc.mount(
-                    c_char_p("overlay".encode("utf-8")),
-                    c_char_p(overlayrootdir.encode("utf-8")),
-                    c_char_p("overlay".encode("utf-8")),
-                    0,
-                    c_char_p(overlayopts.encode("utf-8"))) == 0
-
-            # If the overlay mount fails, just don't use one.
-            if not mount_overlay():
-                overlayrootdir = chroot
-
-            def mount_bind(path):
-                if os.path.isdir(path):
-                    os.makedirs(overlayrootdir + path, exist_ok=True)
-                else:
-                    os.makedirs(os.path.dirname(overlayrootdir + path), exist_ok=True)
-                    if not os.path.exists(overlayrootdir + path):
-                        with open(overlayrootdir + path, "a"):
-                            pass
-                assert libc.mount(
-                    c_char_p(path.encode("utf-8")),
-                    c_char_p((overlayrootdir + path).encode("utf-8")),
-                    None,
-                    MS_BIND | MS_REC,
-                    None) == 0
-
-            if mount_etc:
-                mount_bind("/etc/group")
-                mount_bind("/etc/hostname")
-                mount_bind("/etc/hosts")
-                mount_bind("/etc/passwd")
-                mount_bind("/etc/resolv.conf")
-            if mount_home:
-                mount_bind("/home")
-            if mount_joltdir and self._task:
-                mount_bind(self._task.joltdir)
-            if mount_cachedir:
-                mount_bind(config.get_cachedir())
-            if mount_builddir:
-                mount_bind(self.buildroot)
-            if mount:
-                for m in mount:
-                    mount_bind(m)
-            if mount_dev:
-                mount_bind("/dev")
-            if mount_proc:
-                mount_bind("/proc")
-            os.chroot(overlayrootdir)
-            os.chdir(self.getcwd())
-
-        def unshare_chroot_catch():
-            try:
-                unshare_chroot(overlayrootdir)
-            except Exception as e:
-                log.exception(e)
-                raise e
+        if mount_etc:
+            bind.append("/etc/group")
+            bind.append("/etc/hostname")
+            bind.append("/etc/hosts")
+            bind.append("/etc/passwd")
+            bind.append("/etc/resolv.conf")
+
+        if mount_home:
+            bind.append("/home")
+
+        if mount_joltdir and self._task:
+            from jolt.loader import get_workspacedir
+            bind.append(get_workspacedir())
+
+        if mount_cachedir:
+            bind.append(config.get_cachedir())
+
+        if mount_builddir:
+            bind.append(self.buildroot)
+
+        if mount:
+            for m in mount:
+                bind.append(m)
+
+        if mount_dev:
+            bind.append("/dev")
+
+        if mount_proc:
+            bind.append("/proc")
+
+        unshare = os.path.join(os.path.dirname(__file__), "chroot.py")
 
         old_chroot = self._chroot
-        old_preexec_fn = self._preexec_fn
+        old_chroot_path = self._chroot_path
+        old_chroot_prefix = self._chroot_prefix
         self._chroot = chroot
-        self._preexec_fn = unshare_chroot_catch
+
+        if path:
+            self._chroot_path = path
+        else:
+            self._chroot_path = self._env.get("PATH")
+            self._chroot_path = self._chroot_path.split(fs.pathsep) if self._chroot_path else []
+
         try:
-            yield self._chroot
+            with self.tmpdir("chroot") as bindroot:
+                self._chroot_prefix = [
+                    sys.executable,
+                    unshare,
+                    "-b",
+                ] + bind + [
+                    "-c",
+                    chroot,
+                    "-t",
+                    bindroot,
+                    "--shell={shell}",
+                    "--",
+                ]
+                yield
         finally:
             self._chroot = old_chroot
-            self._preexec_fn = old_preexec_fn
+            self._chroot_path = old_chroot_path
+            self._chroot_prefix = old_chroot_prefix
 
     def _unshare(self, uidmap, gidmap):
         from ctypes import CDLL
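The overlay/ctypes logic removed above is replaced by spawning the bundled jolt/chroot.py helper in front of every command. As a rough illustration (the directory values are hypothetical, and the flag meanings are inferred only from how the prefix is assembled in this hunk), the resulting command prefix looks like this:

    import sys

    # Hypothetical stand-ins for the values computed in the hunk above.
    unshare = "/path/to/jolt/chroot.py"   # helper shipped next to tools.py
    bind = ["/etc/passwd", "/etc/resolv.conf", "/dev", "/proc"]
    rootfs = "/path/to/rootfs"
    bindroot = "/tmp/jolt-chroot"         # from tools.tmpdir("chroot")

    chroot_prefix = [
        sys.executable, unshare,
        "-b", *bind,          # bind mounts collected in the 'bind' list
        "-c", rootfs,         # root filesystem to enter
        "-t", bindroot,       # temporary bind-mount staging directory
        "--shell={shell}",    # presumably expanded later like other tool strings
        "--",
    ]
    # A command such as ["gcc", "--version"] would then be executed as:
    print(chroot_prefix + ["gcc", "--version"])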
@@ -1684,6 +2019,7 @@ class Tools(object):
         raise_task_error_if(
             not newuidmap, self._task,
             "No usable 'newuidmap' found in PATH")
+
         newgidmap = self.which("newgidmap")
         raise_task_error_if(
             not newgidmap, self._task,
@@ -1758,6 +2094,12 @@ class Tools(object):
         raise_task_error_if(
             not uidmap, self._task,
             "Invalid uid map: {}", uidmap)
+        raise_task_error_if(
+            not self.which("newuidmap"), self._task,
+            "No usable 'newuidmap' found in PATH")
+        raise_task_error_if(
+            not self.which("newgidmap"), self._task,
+            "No usable 'newgidmap' found in PATH")
 
         msgq = multiprocessing.JoinableQueue()
         pid = os.fork()
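These checks guard the setuid helpers that write the ID maps of the new user namespace. As a rough sketch (the pid is a placeholder; argument order per newuidmap(1)), an identity mapping like the (uid, uid, 1) tuples used elsewhere in this module translates into a helper invocation as follows:

    import os

    # Identity-map the current user, mirroring the (inside, outside, count)
    # tuples passed to Tools._unshare().
    uid = os.geteuid()
    uidmap = [(uid, uid, 1)]

    def map_args(pid, mapping):
        # newuidmap/newgidmap expect: <pid> <id inside ns> <id outside ns> <count> ...
        args = [str(pid)]
        for inside, outside, count in mapping:
            args += [str(inside), str(outside), str(count)]
        return args

    print(["newuidmap"] + map_args(12345, uidmap))  # pid 12345 is a placeholder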
@@ -1789,6 +2131,15 @@ class Tools(object):
         """
         Uploads a file using HTTP (PUT).
 
+        Automatically expands any {keyword} arguments in the URL and pathname.
+
+        Basic authentication is supported by including the credentials in the URL.
+        Environment variables can be used to hide sensitive information. Specify
+        the environment variable name in the URI as e.g.
+        ``http://{environ[USER]}:{environ[PASS]}@host``.
+        Alternatively, the auth parameter can be used to provide an authentication
+        object that is passed to the requests.put() function.
+
         Throws a JoltError exception on failure.
 
         Args:
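A hedged usage sketch of the documented authentication options; the host, repository path, credentials and environment variable names are examples only:

    from jolt import Task
    from requests.auth import HTTPBasicAuth

    class Publish(Task):
        """ Hypothetical task uploading a build result. """

        def run(self, deps, tools):
            # Credentials taken from the environment and expanded into the URL.
            tools.upload_file(
                "result.tar.gz",
                "https://{environ[REPO_USER]}:{environ[REPO_PASS]}@example.com/repo/result.tar.gz")

            # Equivalent, passing an auth object through to requests instead.
            tools.upload_file(
                "result.tar.gz",
                "https://example.com/repo/result.tar.gz",
                auth=HTTPBasicAuth("user", "secret"))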
@@ -1801,18 +2152,38 @@ class Tools(object):
 
         """
         pathname = self.expand_path(pathname)
+        url = self.expand(url)
         name = fs.path.basename(pathname)
         size = self.file_size(pathname)
-        with log.progress("Uploading " + name, size, "B") as pbar, open(pathname, 'rb') as fileobj:
+
+        url_parsed = urlparse(url)
+        raise_task_error_if(
+            not url_parsed.scheme or not url_parsed.netloc,
+            self._task,
+            "Invalid URL: '{}'", url)
+
+        if auth is None and url_parsed.username and url_parsed.password:
+            auth = HTTPBasicAuth(url_parsed.username, url_parsed.password)
+
+        # Redact password from URL if present
+        if url_parsed.password:
+            url_parsed = url_parsed._replace(netloc=url_parsed.netloc.replace(url_parsed.password, "****"))
+
+        url_cleaned = urlunparse(url_parsed)
+
+        with log.progress("Uploading " + utils.shorten(name), size, "B") as pbar, \
+                open(pathname, 'rb') as fileobj:
+            log.verbose("{} -> {}", pathname, url_cleaned)
+
             def read():
                 data = fileobj.read(4096)
                 pbar.update(len(data))
                 return data
-            from requests.api import put
-            response = put(url, data=iter(read, b''), auth=auth, **kwargs)
+
+            response = http_session.put(url, data=iter(read, b''), auth=auth, **kwargs)
         raise_error_if(
             exceptions and response.status_code not in [201, 204],
-            f"Upload to '{url}' failed with status '{response.status_code}'")
+            f"Upload to '{url_cleaned}' failed with status '{response.status_code}'")
         return response.status_code in [201, 204]
 
     def read_file(self, pathname, binary=False):
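The password redaction can be replayed in isolation; the snippet below applies the same urlparse logic to a throwaway URL:

    from urllib.parse import urlparse, urlunparse

    url = "https://alice:s3cret@example.com/repo/file.tar.gz"
    parsed = urlparse(url)
    if parsed.password:
        # Same substitution as above: hide the password in anything that gets logged.
        parsed = parsed._replace(netloc=parsed.netloc.replace(parsed.password, "****"))
    print(urlunparse(parsed))  # https://alice:****@example.com/repo/file.tar.gz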
@@ -1821,6 +2192,60 @@ class Tools(object):
         with open(pathname, "rb" if binary else "r") as f:
             return f.read()
 
+    def read_depfile(self, pathname):
+        """
+        Reads a Make dependency file.
+
+        Returns:
+            dict: Dictionary of files and their dependencies.
+        """
+        pathname = self.expand_path(pathname)
+        with open(pathname) as f:
+            data = f.read()
+
+        data = data.strip()
+        data = data.replace("\\\n", "")
+        data = data.splitlines()
+
+        deps = {}
+
+        for line in data:
+            # Skip empty lines and comments
+            if not line or line[0] == "#":
+                continue
+
+            parts = line.split(":", 1)
+            raise_error_if(len(parts) != 2, "Depfile parse error: '{}'", line)
+            outputs, inputs = parts[0], parts[1]
+            outputs, inputs = outputs.strip(), inputs.strip()
+            # Temporarily replace escaped spaces in names so that
+            # the list of dependencies can be split.
+            outputs, inputs = outputs.replace("\\ ", "\x00"), inputs.replace("\\ ", "\x00")
+
+            for output in outputs.split():
+                output = output.replace("\x00", " ")
+                for input in inputs.split():
+                    input = input.replace("\x00", " ")
+                    if output not in deps:
+                        deps[output] = []
+                    deps[output].append(input)
+
+        return deps
+
+    def read_json(self, pathname):
+        """
+        Reads a JSON file.
+
+        Args:
+            pathname (str): Name/path of file to be read.
+
+        Returns:
+            dict: Dictionary of JSON data.
+        """
+        pathname = self.expand_path(pathname)
+        with open(pathname) as f:
+            return json.load(f)
+
     def which(self, executable):
         """ Find executable in PATH.
 
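A sketch of the new read_depfile() helper in use; the task, file name and depfile contents are hypothetical:

    from jolt import Task

    class Objects(Task):
        """ Hypothetical task post-processing compiler dependency output. """

        def run(self, deps, tools):
            # Given a depfile "main.d" containing:
            #   build/main.o: src/main.c include/config.h \
            #       include/my\ header.h
            # read_depfile() returns:
            #   {"build/main.o": ["src/main.c", "include/config.h", "include/my header.h"]}
            depmap = tools.read_depfile("main.d")
            for obj, inputs in depmap.items():
                self.info("{} depends on {}".format(obj, ", ".join(inputs)))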
@@ -1832,9 +2257,12 @@ class Tools(object):
         """
         executable = self.expand(executable)
         path = self._env.get("PATH")
+
         if self._chroot:
-            path = fs.pathsep.join(
-                [self._chroot + p for p in path.split(fs.pathsep)]) + fs.pathsep + path
+            path = path.split(fs.pathsep) if path else []
+            path = [os.path.join(self._chroot, p.lstrip(fs.sep)) for p in self._chroot_path] + path
+            path = fs.pathsep.join(path)
+
         result = shutil.which(executable, path=path)
         if result and self._chroot and result.startswith(self._chroot):
             result = result[len(self._chroot):]
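To make the new lookup order concrete, this standalone snippet rebuilds the search path the same way, using made-up values for the chroot and PATH:

    import os

    chroot = "/rootfs"
    chroot_path = ["/usr/bin", "/bin"]     # as set by Tools.chroot(..., path=...)
    host_path = "/usr/local/bin:/usr/bin"  # the caller's PATH

    path = host_path.split(os.pathsep) if host_path else []
    path = [os.path.join(chroot, p.lstrip(os.sep)) for p in chroot_path] + path
    print(os.pathsep.join(path))
    # -> /rootfs/usr/bin:/rootfs/bin:/usr/local/bin:/usr/bin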
@@ -1861,3 +2289,22 @@ class Tools(object):
         content = self.expand(content, **kwargs)
         with open(pathname, "wb") as f:
             f.write(content.encode())
+
+    def write_json(self, pathname, data, indent=4, sort_keys=True):
+        """
+        Writes a JSON file.
+
+        Args:
+            pathname (str): Name/path of file to be written.
+            data (dict): Dictionary of JSON data.
+            indent (int): Indentation level for JSON output.
+        """
+        pathname = self.expand_path(pathname)
+        with open(pathname, "w") as f:
+            json.dump(data, f, indent=indent, sort_keys=sort_keys)
+
+    @property
+    def wsroot(self):
+        """ Return the root path of all build directories """
+        from jolt.loader import get_workspacedir
+        return fs.path.normpath(get_workspacedir())
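Finally, a small hypothetical task showing the new JSON helpers and the wsroot property together; the file name and keys are examples:

    from jolt import Task

    class Record(Task):
        """ Hypothetical task writing and reading back a JSON file. """

        def run(self, deps, tools):
            tools.write_json("build-info.json", {"version": "1.0", "clean": True})
            info = tools.read_json("build-info.json")
            assert info["version"] == "1.0"
            # wsroot resolves to the normalized workspace directory.
            self.info("Workspace root: " + tools.wsroot)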