jolt 0.9.123__py3-none-any.whl → 0.9.435__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196) hide show
  1. jolt/__init__.py +80 -7
  2. jolt/__main__.py +9 -1
  3. jolt/bin/fstree-darwin-x86_64 +0 -0
  4. jolt/bin/fstree-linux-x86_64 +0 -0
  5. jolt/cache.py +832 -362
  6. jolt/chroot.py +156 -0
  7. jolt/cli.py +281 -162
  8. jolt/common_pb2.py +63 -0
  9. jolt/common_pb2_grpc.py +4 -0
  10. jolt/config.py +98 -41
  11. jolt/error.py +19 -4
  12. jolt/filesystem.py +2 -6
  13. jolt/graph.py +705 -117
  14. jolt/hooks.py +43 -0
  15. jolt/influence.py +122 -3
  16. jolt/loader.py +369 -121
  17. jolt/log.py +225 -63
  18. jolt/manifest.py +28 -38
  19. jolt/options.py +35 -10
  20. jolt/pkgs/abseil.py +42 -0
  21. jolt/pkgs/asio.py +25 -0
  22. jolt/pkgs/autoconf.py +41 -0
  23. jolt/pkgs/automake.py +41 -0
  24. jolt/pkgs/b2.py +31 -0
  25. jolt/pkgs/boost.py +111 -0
  26. jolt/pkgs/boringssl.py +32 -0
  27. jolt/pkgs/busybox.py +39 -0
  28. jolt/pkgs/bzip2.py +43 -0
  29. jolt/pkgs/cares.py +29 -0
  30. jolt/pkgs/catch2.py +36 -0
  31. jolt/pkgs/cbindgen.py +17 -0
  32. jolt/pkgs/cista.py +19 -0
  33. jolt/pkgs/clang.py +44 -0
  34. jolt/pkgs/cli11.py +24 -0
  35. jolt/pkgs/cmake.py +48 -0
  36. jolt/pkgs/cpython.py +196 -0
  37. jolt/pkgs/crun.py +29 -0
  38. jolt/pkgs/curl.py +38 -0
  39. jolt/pkgs/dbus.py +18 -0
  40. jolt/pkgs/double_conversion.py +24 -0
  41. jolt/pkgs/fastfloat.py +21 -0
  42. jolt/pkgs/ffmpeg.py +28 -0
  43. jolt/pkgs/flatbuffers.py +29 -0
  44. jolt/pkgs/fmt.py +27 -0
  45. jolt/pkgs/fstree.py +20 -0
  46. jolt/pkgs/gflags.py +18 -0
  47. jolt/pkgs/glib.py +18 -0
  48. jolt/pkgs/glog.py +25 -0
  49. jolt/pkgs/glslang.py +21 -0
  50. jolt/pkgs/golang.py +16 -11
  51. jolt/pkgs/googlebenchmark.py +18 -0
  52. jolt/pkgs/googletest.py +46 -0
  53. jolt/pkgs/gperf.py +15 -0
  54. jolt/pkgs/grpc.py +73 -0
  55. jolt/pkgs/hdf5.py +19 -0
  56. jolt/pkgs/help2man.py +14 -0
  57. jolt/pkgs/inja.py +28 -0
  58. jolt/pkgs/jsoncpp.py +31 -0
  59. jolt/pkgs/libarchive.py +43 -0
  60. jolt/pkgs/libcap.py +44 -0
  61. jolt/pkgs/libdrm.py +44 -0
  62. jolt/pkgs/libedit.py +42 -0
  63. jolt/pkgs/libevent.py +31 -0
  64. jolt/pkgs/libexpat.py +27 -0
  65. jolt/pkgs/libfastjson.py +21 -0
  66. jolt/pkgs/libffi.py +16 -0
  67. jolt/pkgs/libglvnd.py +30 -0
  68. jolt/pkgs/libogg.py +28 -0
  69. jolt/pkgs/libpciaccess.py +18 -0
  70. jolt/pkgs/libseccomp.py +21 -0
  71. jolt/pkgs/libtirpc.py +24 -0
  72. jolt/pkgs/libtool.py +42 -0
  73. jolt/pkgs/libunwind.py +35 -0
  74. jolt/pkgs/libva.py +18 -0
  75. jolt/pkgs/libvorbis.py +33 -0
  76. jolt/pkgs/libxml2.py +35 -0
  77. jolt/pkgs/libxslt.py +17 -0
  78. jolt/pkgs/libyajl.py +16 -0
  79. jolt/pkgs/llvm.py +81 -0
  80. jolt/pkgs/lua.py +54 -0
  81. jolt/pkgs/lz4.py +26 -0
  82. jolt/pkgs/m4.py +14 -0
  83. jolt/pkgs/make.py +17 -0
  84. jolt/pkgs/mesa.py +81 -0
  85. jolt/pkgs/meson.py +17 -0
  86. jolt/pkgs/mstch.py +28 -0
  87. jolt/pkgs/mysql.py +60 -0
  88. jolt/pkgs/nasm.py +49 -0
  89. jolt/pkgs/ncurses.py +30 -0
  90. jolt/pkgs/ng_log.py +25 -0
  91. jolt/pkgs/ninja.py +45 -0
  92. jolt/pkgs/nlohmann_json.py +25 -0
  93. jolt/pkgs/nodejs.py +19 -11
  94. jolt/pkgs/opencv.py +24 -0
  95. jolt/pkgs/openjdk.py +26 -0
  96. jolt/pkgs/openssl.py +103 -0
  97. jolt/pkgs/paho.py +76 -0
  98. jolt/pkgs/patchelf.py +16 -0
  99. jolt/pkgs/perl.py +42 -0
  100. jolt/pkgs/pkgconfig.py +64 -0
  101. jolt/pkgs/poco.py +39 -0
  102. jolt/pkgs/protobuf.py +77 -0
  103. jolt/pkgs/pugixml.py +27 -0
  104. jolt/pkgs/python.py +19 -0
  105. jolt/pkgs/qt.py +35 -0
  106. jolt/pkgs/rapidjson.py +26 -0
  107. jolt/pkgs/rapidyaml.py +28 -0
  108. jolt/pkgs/re2.py +30 -0
  109. jolt/pkgs/re2c.py +17 -0
  110. jolt/pkgs/readline.py +15 -0
  111. jolt/pkgs/rust.py +41 -0
  112. jolt/pkgs/sdl.py +28 -0
  113. jolt/pkgs/simdjson.py +27 -0
  114. jolt/pkgs/soci.py +46 -0
  115. jolt/pkgs/spdlog.py +29 -0
  116. jolt/pkgs/spirv_llvm.py +21 -0
  117. jolt/pkgs/spirv_tools.py +24 -0
  118. jolt/pkgs/sqlite.py +83 -0
  119. jolt/pkgs/ssl.py +12 -0
  120. jolt/pkgs/texinfo.py +15 -0
  121. jolt/pkgs/tomlplusplus.py +22 -0
  122. jolt/pkgs/wayland.py +26 -0
  123. jolt/pkgs/x11.py +58 -0
  124. jolt/pkgs/xerces_c.py +20 -0
  125. jolt/pkgs/xorg.py +360 -0
  126. jolt/pkgs/xz.py +29 -0
  127. jolt/pkgs/yamlcpp.py +30 -0
  128. jolt/pkgs/zeromq.py +47 -0
  129. jolt/pkgs/zlib.py +87 -0
  130. jolt/pkgs/zstd.py +33 -0
  131. jolt/plugins/alias.py +3 -0
  132. jolt/plugins/allure.py +5 -2
  133. jolt/plugins/autotools.py +66 -0
  134. jolt/plugins/cache.py +133 -0
  135. jolt/plugins/cmake.py +74 -6
  136. jolt/plugins/conan.py +238 -0
  137. jolt/plugins/cxx.py +698 -0
  138. jolt/plugins/cxxinfo.py +7 -0
  139. jolt/plugins/dashboard.py +1 -1
  140. jolt/plugins/docker.py +80 -23
  141. jolt/plugins/email.py +2 -2
  142. jolt/plugins/email.xslt +144 -101
  143. jolt/plugins/environ.py +11 -0
  144. jolt/plugins/fetch.py +141 -0
  145. jolt/plugins/gdb.py +39 -19
  146. jolt/plugins/gerrit.py +1 -14
  147. jolt/plugins/git.py +283 -85
  148. jolt/plugins/googletest.py +2 -1
  149. jolt/plugins/http.py +36 -38
  150. jolt/plugins/libtool.py +63 -0
  151. jolt/plugins/linux.py +990 -0
  152. jolt/plugins/logstash.py +4 -4
  153. jolt/plugins/meson.py +61 -0
  154. jolt/plugins/ninja-compdb.py +99 -30
  155. jolt/plugins/ninja.py +468 -166
  156. jolt/plugins/paths.py +11 -1
  157. jolt/plugins/pkgconfig.py +219 -0
  158. jolt/plugins/podman.py +136 -92
  159. jolt/plugins/python.py +137 -0
  160. jolt/plugins/remote_execution/__init__.py +0 -0
  161. jolt/plugins/remote_execution/administration_pb2.py +46 -0
  162. jolt/plugins/remote_execution/administration_pb2_grpc.py +170 -0
  163. jolt/plugins/remote_execution/log_pb2.py +32 -0
  164. jolt/plugins/remote_execution/log_pb2_grpc.py +68 -0
  165. jolt/plugins/remote_execution/scheduler_pb2.py +41 -0
  166. jolt/plugins/remote_execution/scheduler_pb2_grpc.py +141 -0
  167. jolt/plugins/remote_execution/worker_pb2.py +38 -0
  168. jolt/plugins/remote_execution/worker_pb2_grpc.py +112 -0
  169. jolt/plugins/report.py +12 -2
  170. jolt/plugins/rust.py +25 -0
  171. jolt/plugins/scheduler.py +710 -0
  172. jolt/plugins/selfdeploy/setup.py +8 -4
  173. jolt/plugins/selfdeploy.py +138 -88
  174. jolt/plugins/strings.py +35 -22
  175. jolt/plugins/symlinks.py +26 -11
  176. jolt/plugins/telemetry.py +5 -2
  177. jolt/plugins/timeline.py +13 -3
  178. jolt/plugins/volume.py +46 -48
  179. jolt/scheduler.py +589 -192
  180. jolt/tasks.py +625 -121
  181. jolt/templates/timeline.html.template +44 -47
  182. jolt/timer.py +22 -0
  183. jolt/tools.py +638 -282
  184. jolt/utils.py +211 -7
  185. jolt/version.py +1 -1
  186. jolt/xmldom.py +12 -2
  187. {jolt-0.9.123.dist-info → jolt-0.9.435.dist-info}/METADATA +97 -38
  188. jolt-0.9.435.dist-info/RECORD +207 -0
  189. {jolt-0.9.123.dist-info → jolt-0.9.435.dist-info}/WHEEL +1 -1
  190. jolt/plugins/amqp.py +0 -834
  191. jolt/plugins/debian.py +0 -338
  192. jolt/plugins/ftp.py +0 -181
  193. jolt/plugins/repo.py +0 -253
  194. jolt-0.9.123.dist-info/RECORD +0 -77
  195. {jolt-0.9.123.dist-info → jolt-0.9.435.dist-info}/entry_points.txt +0 -0
  196. {jolt-0.9.123.dist-info → jolt-0.9.435.dist-info}/top_level.txt +0 -0
jolt/cache.py CHANGED
@@ -1,6 +1,6 @@
1
1
  import atexit
2
2
  import contextlib
3
- from collections import OrderedDict
3
+ from collections import namedtuple, OrderedDict
4
4
  from datetime import datetime
5
5
  import fasteners
6
6
  import json
@@ -10,18 +10,20 @@ from threading import RLock
10
10
  import uuid
11
11
 
12
12
  from jolt import config
13
+ from jolt import expires
13
14
  from jolt import filesystem as fs
14
15
  from jolt import influence
15
16
  from jolt import log
16
17
  from jolt import tools
17
18
  from jolt import utils
19
+ from jolt import tasks
18
20
  from jolt.options import JoltOptions
19
21
  from jolt.error import raise_error, raise_error_if
20
22
  from jolt.error import raise_task_error, raise_task_error_if
21
23
  from jolt.expires import ArtifactEvictionStrategyRegister
22
24
 
23
25
 
24
- DEFAULT_ARCHIVE_TYPE = ".tar.gz"
26
+ DEFAULT_ARCHIVE_TYPE = ".tar.zst"
25
27
 
26
28
 
27
29
  def locked(func):
@@ -31,179 +33,122 @@ def locked(func):
31
33
  return _f
32
34
 
33
35
 
34
- class StorageProvider(object):
35
- def download(self, node, force=False):
36
- return False
37
-
38
- def download_enabled(self):
39
- return True
40
-
41
- def upload(self, node, force=False):
42
- return False
43
-
44
- def upload_enabled(self):
45
- return True
46
-
47
- def location(self, node):
48
- return '' # URL
49
-
50
-
51
- class StorageProviderFactory(StorageProvider):
52
- def create(self):
53
- pass
54
-
55
-
56
- def RegisterStorage(cls):
57
- ArtifactCache.storage_provider_factories.append(cls)
58
-
59
-
60
- class ArtifactAttributeSet(object):
61
- def __init__(self):
62
- super(ArtifactAttributeSet, self).__setattr__("_attributes", {})
63
-
64
- def _get_attributes(self):
65
- return self._attributes
66
-
67
- def __getattr__(self, name):
68
- attributes = self._get_attributes()
69
- if name not in attributes:
70
- attributes[name] = self.create(name)
71
- return attributes[name]
72
-
73
- def __setattr__(self, name, value):
74
- attributes = self._get_attributes()
75
- if name not in attributes:
76
- attributes[name] = self.create(name)
77
- attributes[name].set_value(value)
78
- return attributes[name]
79
-
80
- def __dict__(self):
81
- return {key: str(value) for key, value in self.items()}
82
-
83
- def items(self):
84
- return self._get_attributes().items()
85
-
86
- def apply(self, task, artifact):
87
- for _, value in self.items():
88
- value.apply(task, artifact)
89
-
90
- def apply_deps(self, task, deps):
91
- pass
92
-
93
- def unapply(self, task, artifact):
94
- for _, value in self.items():
95
- value.unapply(task, artifact)
96
-
97
- def unapply_deps(self, task, deps):
98
- pass
99
-
100
- def visit(self, task, artifact, visitor):
101
- for _, value in self.items():
102
- value.visit(task, artifact, visitor)
103
-
104
-
105
36
  class ArtifactAttributeSetRegistry(object):
106
- providers = []
37
+ """
38
+ Registry for providers of artifact attribute sets.
39
+ """
40
+
41
+ providers = [] # List of objects that implement ArtifactAttributeSetProvider
107
42
 
108
43
  @staticmethod
109
44
  def create_all(artifact):
45
+ """ Create all artifact attribute sets. """
110
46
  for provider in ArtifactAttributeSetRegistry.providers:
111
47
  provider().create(artifact)
112
48
 
113
49
  @staticmethod
114
50
  def parse_all(artifact, content):
51
+ """ Parse all artifact attribute sets. """
115
52
  for provider in ArtifactAttributeSetRegistry.providers:
116
53
  provider().parse(artifact, content)
117
54
 
118
55
  @staticmethod
119
56
  def format_all(artifact, content):
57
+ """ Format all artifact attribute sets. """
120
58
  for provider in ArtifactAttributeSetRegistry.providers:
121
59
  provider().format(artifact, content)
122
60
 
123
61
  @staticmethod
124
62
  def apply_all(task, artifact):
63
+ """ Apply all artifact attribute sets. """
125
64
  for provider in ArtifactAttributeSetRegistry.providers:
126
65
  provider().apply(task, artifact)
127
66
 
128
- @staticmethod
129
- def apply_all_deps(task, deps):
130
- for provider in ArtifactAttributeSetRegistry.providers:
131
- provider().apply_deps(task, deps)
132
-
133
67
  @staticmethod
134
68
  def unapply_all(task, artifact):
69
+ """ Unapply all artifact attribute sets. """
135
70
  for provider in ArtifactAttributeSetRegistry.providers:
136
71
  provider().unapply(task, artifact)
137
72
 
138
- @staticmethod
139
- def unapply_all_deps(task, deps):
140
- for provider in ArtifactAttributeSetRegistry.providers:
141
- provider().unapply_deps(task, deps)
142
-
143
73
  @staticmethod
144
74
  def visit_all(task, artifact, visitor):
75
+ """ Visit all artifact attribute sets. """
145
76
  for provider in ArtifactAttributeSetRegistry.providers:
146
77
  provider().visit(task, artifact, visitor)
147
78
 
148
79
 
149
- def visit_artifact(task, artifact, visitor):
150
- ArtifactAttributeSetRegistry.visit_all(task, artifact, visitor)
151
-
152
-
153
- class ArtifactAttributeSetProvider(object):
154
- @staticmethod
155
- def Register(cls):
156
- ArtifactAttributeSetRegistry.providers.append(cls)
157
-
158
- def create(self, artifact):
159
- raise NotImplementedError()
160
-
161
- def parse(self, artifact, content):
162
- raise NotImplementedError()
163
-
164
- def format(self, artifact, content):
165
- raise NotImplementedError()
166
-
167
- def apply(self, task, artifact):
168
- pass
169
-
170
- def apply_deps(self, task, deps):
171
- pass
172
-
173
- def unapply(self, task, artifact):
174
- pass
175
-
176
- def unapply_deps(self, task, deps):
177
- pass
80
+ class ArtifactAttribute(object):
81
+ """
82
+ An artifact attribute.
178
83
 
179
- def visit(self, task, artifact, visitor):
180
- pass
84
+ An artifact attribute is a key-value pair that can be set and retrieved
85
+ from an artifact attribute set. Attributes are used to store metadata and other
86
+ information that is associated with an artifact. They communicate information
87
+ between tasks and store information that is used by tasks when they consume an artifact.
181
88
 
89
+ Artifact attributes can also perform actions when the artifact is consumed.
182
90
 
183
- class ArtifactAttribute(object):
91
+ """
184
92
  def __init__(self, name):
185
93
  self._name = name
186
94
 
187
95
  def get_name(self):
96
+ """ Get the name of the attribute. """
188
97
  return self._name
189
98
 
190
99
  def set_value(self, value, expand=True):
100
+ """
101
+ Set the value of the attribute.
102
+
103
+ Must be implemented by subclasses.
104
+
105
+ Args:
106
+ value: The value to set.
107
+ expand: If True, the value is macro expanded using the tools.expand() method.
108
+ """
191
109
  raise NotImplementedError()
192
110
 
193
111
  def get_value(self):
112
+ """
113
+ Get the value of the attribute.
114
+
115
+ Must be implemented by subclasses.
116
+ """
194
117
  raise NotImplementedError()
195
118
 
196
119
  def apply(self, task, artifact):
120
+ """
121
+ Perform an action when the artifact is being used.
122
+
123
+ Args:
124
+ task (Task): The task that is using the artifact.
125
+ artifact (Artifact): The artifact that is being used.
126
+
127
+ """
197
128
  pass
198
129
 
199
130
  def unapply(self, task, artifact):
131
+ """
132
+ Undo an action when the artifact is no longer being used.
133
+
134
+ Args:
135
+ task (Task): The task that is no longer using the artifact.
136
+ artifact (Artifact): The artifact that is no longer being used.
137
+ """
200
138
  pass
201
139
 
202
- def __str__(self):
140
+ def __str__(self) -> str:
141
+ """
142
+ Get a string representation of the attribute.
143
+
144
+ Must be implemented by subclasses.
145
+ """
203
146
  raise NotImplementedError()
204
147
 
205
148
 
206
149
  class ArtifactStringAttribute(ArtifactAttribute):
150
+ """ An artifact attribute that stores a string value. """
151
+
207
152
  def __init__(self, artifact, name):
208
153
  self._artifact = artifact
209
154
  self._name = name
@@ -213,27 +158,32 @@ class ArtifactStringAttribute(ArtifactAttribute):
213
158
  return self._name
214
159
 
215
160
  def set_value(self, value, expand=True):
216
- self._value = self._artifact.get_task().expand(str(value)) if expand else str(value)
161
+ self._value = self._artifact.tools.expand(str(value)) if expand else str(value)
217
162
 
218
163
  def get_value(self):
219
164
  return self._value
220
165
 
221
- def apply(self, task, artifact):
222
- pass
223
-
224
- def unapply(self, task, artifact):
225
- pass
226
-
227
- def __str__(self):
166
+ def __str__(self) -> str:
228
167
  return str(self._value)
229
168
 
230
169
 
231
170
  class ArtifactListAttribute(ArtifactAttribute):
171
+ """ An artifact attribute that stores a list of values. """
172
+
232
173
  def __init__(self, artifact, name):
233
174
  self._artifact = artifact
234
175
  self._name = name
235
176
  self._value = []
236
177
 
178
+ def __getitem__(self, key):
179
+ return self._value[key]
180
+
181
+ def __getslice__(self, i, j):
182
+ return self._value[i:j]
183
+
184
+ def __len__(self):
185
+ return len(self._value)
186
+
237
187
  def get_name(self):
238
188
  return self._name
239
189
 
@@ -241,22 +191,22 @@ class ArtifactListAttribute(ArtifactAttribute):
241
191
  if type(value) is str:
242
192
  value = value.split(":")
243
193
  raise_error_if(type(value) is not list, "Illegal value assigned to artifact list attribute")
244
- self._value = self._artifact.get_task().expand(value) if expand else value
194
+ self._value = self._artifact.tools.expand(value) if expand else value
245
195
 
246
196
  def get_value(self):
247
197
  return self._value
248
198
 
249
199
  def append(self, value):
250
200
  if type(value) is list:
251
- self._value.extend(self._artifact.get_task().expand(value))
201
+ self._value.extend(self._artifact.tools.expand(value))
252
202
  else:
253
- self._value.append(self._artifact.get_task().expand(value))
203
+ self._value.append(self._artifact.tools.expand(value))
254
204
 
255
205
  def extend(self, value):
256
206
  raise_error_if(
257
207
  type(value) is not list,
258
208
  "Illegal type passed to {}.extend() - list expected".format(self._name))
259
- self._value.extend(self._artifact.get_task().expand(value))
209
+ self._value.extend(self._artifact.tools.expand(value))
260
210
 
261
211
  def items(self):
262
212
  return list(self._value)
@@ -264,23 +214,16 @@ class ArtifactListAttribute(ArtifactAttribute):
264
214
  def count(self):
265
215
  return len(self.items())
266
216
 
267
- def apply(self, task, artifact):
268
- pass
269
-
270
- def unapply(self, task, artifact):
271
- pass
217
+ def __str__(self) -> str:
218
+ return fs.pathsep.join(str(v) for v in self._value)
272
219
 
273
220
 
274
221
  class ArtifactFileAttribute(object):
222
+ """ An attribute that stores a list of source and destination path tuples for files collected into the artifact. """
223
+
275
224
  def __init__(self):
276
225
  self._files = []
277
226
 
278
- def apply(self, task, artifact):
279
- pass
280
-
281
- def unapply(self, task, artifact):
282
- pass
283
-
284
227
  def append(self, src, dst):
285
228
  self._files.append((fs.as_posix(src), fs.as_posix(dst)))
286
229
 
@@ -291,8 +234,145 @@ class ArtifactFileAttribute(object):
291
234
  return self._files
292
235
 
293
236
 
237
+ class ArtifactAttributeSet(object):
238
+ """
239
+ A set of artifact attributes.
240
+
241
+ An attribute set is a collection of attributes. Each attribute is
242
+ accessed using the attribute name as an attribute of the set. For
243
+ example, to access an attribute named 'version' in an attribute set
244
+ named 'strings', you would write:
245
+
246
+ .. code-block:: python
247
+
248
+ artifact.strings.version = "1.0"
249
+
250
+ """
251
+
252
+ def __init__(self):
253
+ super(ArtifactAttributeSet, self).__setattr__("_attributes", {})
254
+
255
+ def _get_attributes(self):
256
+ return self._attributes
257
+
258
+ def __getattr__(self, name) -> ArtifactAttribute:
259
+ """
260
+ Get or create an attribute by name.
261
+
262
+ Args:
263
+ name (str): The name of the attribute.
264
+
265
+ Returns:
266
+ An attribute object.
267
+ """
268
+ attributes = self._get_attributes()
269
+ if name not in attributes:
270
+ attributes[name] = self.create(name)
271
+ return attributes[name]
272
+
273
+ def __setattr__(self, name, value):
274
+ """
275
+ Set an attribute by name.
276
+
277
+ Args:
278
+ name (str): The name of the attribute.
279
+ value: The value to set.
280
+ """
281
+ attributes = self._get_attributes()
282
+ if name not in attributes:
283
+ attributes[name] = self.create(name)
284
+ attributes[name].set_value(value)
285
+ return attributes[name]
286
+
287
+ def __dict__(self):
288
+ """ Get a dictionary representation of the attribute set. """
289
+ return {key: str(value) for key, value in self.items()}
290
+
291
+ def __iter__(self):
292
+ """ Iterate over the attribute set. """
293
+ return iter(self.items())
294
+
295
+ def get(self, name, default=None):
296
+ """ Get an attribute by name.
297
+
298
+ Args:
299
+ name (str): The name of the attribute.
300
+
301
+ Returns:
302
+ The attribute object, or None if it does not exist.
303
+ """
304
+ attributes = self._get_attributes()
305
+ return attributes.get(name, default)
306
+
307
+ def items(self):
308
+ """ Get a list of tuples containing the attribute name and value. """
309
+ return self._get_attributes().items()
310
+
311
+ def apply(self, task, artifact):
312
+ """ Perform attribute actions when the artifact is being used. """
313
+ for _, value in self.items():
314
+ value.apply(task, artifact)
315
+
316
+ def unapply(self, task, artifact):
317
+ """ Undo attribute actions when the artifact is no longer being used. """
318
+ for _, value in self.items():
319
+ value.unapply(task, artifact)
320
+
321
+ def visit(self, task, artifact, visitor):
322
+ """ Visit all attributes in the set. """
323
+ for _, value in self.items():
324
+ value.visit(task, artifact, visitor)
325
+
326
+
327
+ class ArtifactAttributeSetProvider(object):
328
+ """ Base class for artifact attribute set providers.
329
+
330
+ An artifact attribute set provider is a factory for creating and managing
331
+ attribute sets in an artifact.
332
+ """
333
+
334
+ @staticmethod
335
+ def Register(cls):
336
+ """ Decorator for registering a provider class. """
337
+ ArtifactAttributeSetRegistry.providers.append(cls)
338
+
339
+ def create(self, artifact):
340
+ """ Create an attribute set for an artifact. """
341
+ raise NotImplementedError()
342
+
343
+ def parse(self, artifact, content):
344
+ """
345
+ Parse an attribute set from a dictionary.
346
+
347
+ The dictionary is loaded from a JSON file embedded in the artifact.
348
+ """
349
+ raise NotImplementedError()
350
+
351
+ def format(self, artifact, content):
352
+ """
353
+ Format an attribute set to a dictionary.
354
+
355
+ The dictionary is saved to a JSON file embedded in the artifact.
356
+ """
357
+ raise NotImplementedError()
358
+
359
+ def apply(self, task, artifact):
360
+ """ Perform actions when the artifact is being used. """
361
+ pass
362
+
363
+ def unapply(self, task, artifact):
364
+ """ Undo actions when the artifact is no longer being used. """
365
+ pass
366
+
367
+ def visit(self, task, artifact, visitor):
368
+ """ Visit all attributes in the set. """
369
+ pass
370
+
371
+
294
372
  @ArtifactAttributeSetProvider.Register
295
373
  class ArtifactFileAttributeProvider(ArtifactAttributeSetProvider):
374
+ """ Provider for the artifact 'files' attribute set. """
375
+
296
376
  def create(self, artifact):
297
377
  setattr(artifact, "files", ArtifactFileAttribute())
298
378
 
@@ -306,22 +386,19 @@ class ArtifactFileAttributeProvider(ArtifactAttributeSetProvider):
306
386
  def format(self, artifact, content):
307
387
  content["files"] = [{"src": src, "dst": dst} for src, dst in artifact.files.items()]
308
388
 
309
- def apply(self, task, artifact):
310
- pass
311
-
312
- def unapply(self, task, artifact):
313
- pass
314
389
 
315
- def visit(self, task, artifact, visitor):
316
- pass
390
+ def visit_artifact(task, artifact, visitor):
391
+ ArtifactAttributeSetRegistry.visit_all(task, artifact, visitor)
317
392
 
318
393
 
319
394
  def json_serializer(obj):
395
+ """ JSON serializer for datetime objects. """
320
396
  if isinstance(obj, datetime):
321
397
  return dict(type="datetime", value=obj.strftime("%Y-%m-%d %H:%M:%S.%f"))
322
398
 
323
399
 
324
400
  def json_deserializer(dct):
401
+ """ JSON deserializer for datetime objects. """
325
402
  if dct.get("type") == "datetime":
326
403
  return datetime.strptime(dct["value"], "%Y-%m-%d %H:%M:%S.%f")
327
404
  return dct
@@ -453,26 +530,40 @@ class Artifact(object):
453
530
  artifact.strings.version = "1.2"
454
531
  """
455
532
 
456
- def __init__(self, cache, node, tools=None):
533
+ def __init__(self, cache, node, name=None, identity=None, tools=None, session=False):
457
534
  self._cache = cache
535
+ if identity:
536
+ self._identity = identity
537
+ else:
538
+ self._identity = node.identity if not session else node.instance
539
+ if name:
540
+ self._identity = name + "@" + self._identity
541
+ self._main = name == "main"
542
+ self._name = name or "main"
543
+ self._full_name = f"{self._name}@{node.short_qualified_name}" if node else self._name
544
+ self._log_name = f"{self._full_name} {node.identity[:8]}" if node else self._full_name
458
545
  self._node = node
546
+ self._session = session
547
+ self._task = node.task if node else None
459
548
  self._tools = tools or self._node.tools
460
- self._path = cache._fs_get_artifact_path(node.identity, node.canonical_name)
461
- self._temp = cache._fs_get_artifact_tmppath(node.identity, node.canonical_name)
462
- self._archive = cache._fs_get_artifact_archivepath(node.identity, node.canonical_name)
463
- self._lock_path = cache._fs_get_artifact_lockpath(node.identity)
464
- self._unpacked = False
465
- self._uploadable = True
466
- self._created = datetime.now()
467
- self._modified = datetime.now()
468
- self._expires = node.task.expires
469
- self._size = 0
470
- self._influence = None
549
+ self._path = cache._fs_get_artifact_path(self._identity, node.canonical_name if node else name)
550
+ self._temp = cache._fs_get_artifact_tmppath(self._identity, node.canonical_name if node else name)
551
+ self._archive = cache._fs_get_artifact_archivepath(self._identity, node.canonical_name if node else name)
552
+ self._lock_path = cache._fs_get_artifact_lockpath(self._identity)
471
553
  ArtifactAttributeSetRegistry.create_all(self)
472
- self._valid = False
473
- self._temporary = False
474
- self._read_manifest()
475
- self._temporary = not self._valid
554
+ self.reload()
555
+
556
+ def _info(self, fmt, *args, **kwargs):
557
+ log.info(fmt + f" ({self._log_name})", *args, **kwargs)
558
+
559
+ def _debug(self, fmt, *args, **kwargs):
560
+ log.debug(fmt + f" ({self._log_name})", *args, **kwargs)
561
+
562
+ def _warning(self, fmt, *args, **kwargs):
563
+ log.warning(fmt + f" ({self._log_name})", *args, **kwargs)
564
+
565
+ def _error(self, fmt, *args, **kwargs):
566
+ log.error(fmt + f" ({self._log_name})", *args, **kwargs)
476
567
 
477
568
  def __enter__(self):
478
569
  return self
@@ -483,19 +574,23 @@ class Artifact(object):
483
574
  def __getattr__(self, name):
484
575
  raise_task_error(self._node, "Attempt to access invalid artifact attribute '{0}'", name)
485
576
 
486
- def _write_manifest(self):
577
+ def _write_manifest(self, temporary=False):
487
578
  content = {}
488
- content["task"] = self._node.name
489
579
  content["size"] = self._get_size()
490
580
  content["unpacked"] = self._unpacked
491
581
  content["uploadable"] = self._uploadable
492
- content["identity"] = self._node.identity
493
- content["requires"] = self._node.task.requires
494
- content["parameters"] = self._node.task._get_parameters()
582
+ if self._node:
583
+ content["task"] = self._node.name
584
+ content["identity"] = self._node.identity
585
+ content["requires"] = self._node.task.requires
586
+ content["parameters"] = self._node.task._get_parameters()
587
+
495
588
  if self._influence is not None:
496
589
  content["influence"] = self._influence
497
- else:
590
+ elif self._node:
498
591
  content["influence"] = influence.HashInfluenceRegistry.get().get_strings(self._node.task)
592
+ else:
593
+ content["influence"] = []
499
594
  content["created"] = self._created
500
595
  content["modified"] = datetime.now()
501
596
  content["expires"] = self._expires.value
@@ -504,13 +599,19 @@ class Artifact(object):
504
599
 
505
600
  ArtifactAttributeSetRegistry.format_all(self, content)
506
601
 
507
- manifest = fs.path.join(self.path, ".manifest.json")
602
+ if temporary:
603
+ manifest = fs.path.join(self.temporary_path, ".manifest.json")
604
+ else:
605
+ manifest = fs.path.join(self.final_path, ".manifest.json")
508
606
  with open(manifest, "wb") as f:
509
607
  f.write(json.dumps(content, indent=2, default=json_serializer).encode())
510
608
 
511
- def _read_manifest(self):
609
+ def _read_manifest(self, temporary=False):
512
610
  try:
513
- manifest_path = fs.path.join(self.path, ".manifest.json")
611
+ if temporary:
612
+ manifest_path = fs.path.join(self.temporary_path, ".manifest.json")
613
+ else:
614
+ manifest_path = fs.path.join(self.final_path, ".manifest.json")
514
615
  with open(manifest_path) as manifest_file:
515
616
  content = json.load(manifest_file, object_hook=json_deserializer)
516
617
  self._valid = True
@@ -568,9 +669,44 @@ class Artifact(object):
568
669
  def unapply(self):
569
670
  pass
570
671
 
672
+ def is_main(self):
673
+ return self._main
674
+
675
+ def is_session(self):
676
+ return self._session
677
+
571
678
  def is_valid(self):
572
679
  return self._valid
573
680
 
681
+ def reload(self):
682
+ self._unpacked = False
683
+ self._uploadable = True
684
+ self._created = datetime.now()
685
+ self._modified = datetime.now()
686
+ self._expires = self._task.expires if not self._session else expires.Immediately()
687
+ self._size = 0
688
+ self._influence = None
689
+ self._valid = False
690
+ self._temporary = False
691
+ self._read_manifest()
692
+ self._temporary = not self._valid
693
+
694
+ def reset(self):
695
+ self._unpacked = False
696
+ self._uploadable = True
697
+ self._created = datetime.now()
698
+ self._modified = datetime.now()
699
+ self._expires = self._task.expires if not self._session else expires.Immediately()
700
+ self._size = 0
701
+ self._influence = None
702
+ self._valid = False
703
+ self._temporary = True
704
+
705
+ @property
706
+ def name(self):
707
+ """ str: The name of the artifact. Default: 'main'. """
708
+ return self._name
709
+
574
710
  @property
575
711
  def path(self):
576
712
  """ str: The current location of the artifact in the local cache. """
@@ -625,7 +761,7 @@ class Artifact(object):
625
761
  raise_task_error_if(
626
762
  not self.is_temporary(),
627
763
  self._node,
628
- "Can't collect files into an already published task artifact")
764
+ "Can't collect files into an already published task artifact ({})", self._log_name)
629
765
 
630
766
  files = self.tools.expand_path(files)
631
767
  files = self.tools.glob(files)
@@ -696,7 +832,7 @@ class Artifact(object):
696
832
  raise_task_error_if(
697
833
  self.is_temporary(),
698
834
  self._node,
699
- "Can't copy files from an unpublished task artifact")
835
+ "Can't copy files from an unpublished task artifact ({})", self._log_name)
700
836
 
701
837
  files = fs.path.join(self._path, files)
702
838
  files = self.tools.expand_path(files)
@@ -753,23 +889,24 @@ class Artifact(object):
753
889
  def get_size(self):
754
890
  return self._size
755
891
 
756
- def get_task(self):
757
- return self._node.task
758
-
759
- def get_name(self):
760
- return self._node.qualified_name
761
-
762
892
  def get_cache(self):
763
893
  return self._cache
764
894
 
765
- def get_identity(self):
766
- return self._node.identity
895
+ def get_task(self):
896
+ return self._node.task
897
+
898
+ def get_node(self):
899
+ return self._node
767
900
 
768
- def is_temporary(self):
901
+ def is_temporary(self) -> bool:
769
902
  return self._temporary
770
903
 
771
- def is_unpackable(self):
772
- return self._node.is_unpackable()
904
+ def is_unpackable(self) -> bool:
905
+ if not self._node:
906
+ return True
907
+ if self.name == "main":
908
+ return self._task.unpack.__func__ is not tasks.Task.unpack
909
+ return getattr(self._task, "unpack_" + self.name, tasks.Task.unpack) is not tasks.Task.unpack
773
910
 
774
911
  def is_unpacked(self):
775
912
  return self._unpacked
@@ -777,9 +914,55 @@ class Artifact(object):
777
914
  def is_uploadable(self):
778
915
  return self._uploadable
779
916
 
917
+ def is_cacheable(self):
918
+ if not self._node:
919
+ return True
920
+ if self.is_session():
921
+ return True
922
+ return self.task.is_cacheable()
923
+
780
924
  @property
781
925
  def identity(self):
782
- return self._node.identity
926
+ return self._identity
927
+
928
+ @property
929
+ def task(self):
930
+ if not self._node:
931
+ Task = namedtuple('Point', ['name'])
932
+ return Task(name=self.name)
933
+ return self._node.task
934
+
935
+
936
class ArtifactToolsProxy(object):
    """
    Proxy exposing an artifact through a consumer task's tools object.

    When an artifact is consumed by another task, this proxy lets the
    consumer invoke the artifact's methods and attributes while
    substituting its own tools object, so file copies, reads, etc. use
    the consuming task's working directory and environment.
    """

    def __init__(self, artifact, tools):
        self._artifact = artifact
        self._tools = tools

    def __getattr__(self, name):
        if name == "tools":
            return self._tools
        if name == "_artifact":
            return self._artifact
        # Rebind callables found on the artifact class onto the proxy so
        # that methods observe the proxy (and thus the substituted tools).
        member = getattr(self._artifact.__class__, name, None)
        if member is not None and callable(member):
            return member.__get__(self, ArtifactToolsProxy)
        return getattr(self._artifact, name)

    def __setattr__(self, name, value):
        if name in ("_artifact", "_tools"):
            super(ArtifactToolsProxy, self).__setattr__(name, value)
        else:
            setattr(self._artifact, name, value)
783
966
 
784
967
 
785
968
  class Context(object):
@@ -805,17 +988,29 @@ class Context(object):
805
988
  def __enter__(self):
806
989
  try:
807
990
  for dep in reversed(self._node.children):
808
- self._cache.unpack(dep)
809
- with self._cache.get_artifact(dep, self._node.tools) as artifact:
810
- self._artifacts[dep.qualified_name] = artifact
811
- self._artifacts_index[dep.qualified_name] = artifact
812
- self._artifacts_index[dep.short_qualified_name] = artifact
991
+ for artifact in dep.artifacts:
992
+ # Create clone with tools from this task
993
+ artifact = ArtifactToolsProxy(artifact, self._node.tools)
994
+
995
+ # Don't include session artifacts that don't exist,
996
+ # i.e. where no build has taken place due to presence
997
+ # of the persistent artifacts.
998
+ if not dep.is_resource():
999
+ if artifact.is_session() and not self._cache.is_available_locally(artifact):
1000
+ continue
1001
+
1002
+ self._cache.unpack(artifact)
1003
+
1004
+ if artifact.name == "main":
1005
+ self._artifacts_index[dep.qualified_name] = artifact
1006
+ self._artifacts_index[dep.short_qualified_name] = artifact
1007
+ self._artifacts[artifact.name + "@" + dep.qualified_name] = artifact
1008
+ self._artifacts_index[artifact.name + "@" + dep.qualified_name] = artifact
1009
+ self._artifacts_index[artifact.name + "@" + dep.short_qualified_name] = artifact
813
1010
  artifact.apply()
814
1011
  ArtifactAttributeSetRegistry.apply_all(self._node.task, artifact)
815
- ArtifactAttributeSetRegistry.apply_all_deps(self._node.task, self)
816
- except Exception as e:
1012
+ except (Exception, KeyboardInterrupt) as e:
817
1013
  # Rollback all attributes/resources except the last failing one
818
- ArtifactAttributeSetRegistry.unapply_all_deps(self._node.task, self)
819
1014
  for name, artifact in reversed(list(self._artifacts.items())[:-1]):
820
1015
  with utils.ignore_exception():
821
1016
  ArtifactAttributeSetRegistry.unapply_all(self._node.task, artifact)
@@ -824,7 +1019,6 @@ class Context(object):
824
1019
  return self
825
1020
 
826
1021
  def __exit__(self, type, value, tb):
827
- ArtifactAttributeSetRegistry.unapply_all_deps(self._node.task, self)
828
1022
  for name, artifact in reversed(self._artifacts.items()):
829
1023
  ArtifactAttributeSetRegistry.unapply_all(self._node.task, artifact)
830
1024
  artifact.unapply()
@@ -848,25 +1042,32 @@ class Context(object):
848
1042
 
849
1043
  """
850
1044
 
851
- key = self._node.task.expand(key)
1045
+ key = self._node.tools.expand(key)
852
1046
 
853
- if key not in self._artifacts_index:
854
- key = self._node.resolve_requirement_alias(key) or key
1047
+ alias, artifact, task, params = utils.parse_aliased_task_name(key)
1048
+ raise_task_error_if(alias, self._node, "Cannot define alias when indexing dependencies: {}", alias)
1049
+ task_name = utils.format_task_name(task, params)
1050
+ task_artifact_name = utils.format_task_name(task, params, artifact)
855
1051
 
856
- # Parameters may be unordered, sort them
857
- key = utils.stable_task_name(key)
1052
+ if task_name not in self._artifacts_index and \
1053
+ task_artifact_name not in self._artifacts_index and not params:
1054
+ key = self._node.resolve_requirement_alias(task_name)
1055
+ if key:
1056
+ _, _, task, params = utils.parse_aliased_task_name(key)
1057
+ task_name = utils.format_task_name(task, params)
1058
+ task_artifact_name = utils.format_task_name(task, params, artifact)
858
1059
 
859
1060
  # Parameters may be overspecified, resolve task
860
- if key not in self._artifacts_index:
1061
+ if task_artifact_name not in self._artifacts_index:
861
1062
  from jolt.tasks import TaskRegistry
862
- task = TaskRegistry.get().get_task(key)
863
- key = task.short_qualified_name if task is not None else key
1063
+ task_obj = TaskRegistry.get().get_task(task_name)
1064
+ task_name = task_obj.short_qualified_name if task_obj is not None else task
1065
+ task_artifact_name = task_name if not artifact else f"{artifact}@{task_name}"
864
1066
 
865
- raise_task_error_if(
866
- key not in self._artifacts_index,
867
- self._node,
868
- "No such dependency '{0}'", key)
869
- return self._artifacts_index[key]
1067
+ if task_artifact_name not in self._artifacts_index:
1068
+ raise KeyError("No such artifact dependency '{0}' ({1})".format(
1069
+ task_artifact_name, self._node.short_qualified_name))
1070
+ return self._artifacts_index[task_artifact_name]
870
1071
 
871
1072
  def items(self):
872
1073
  """ List all requirements and their artifacts.
@@ -879,11 +1080,148 @@ class Context(object):
879
1080
 
880
1081
 
881
1082
class PidProvider(object):
    """Callable generating unique pid lock file names for cache processes."""

    def __call__(self):
        """Return a newly generated UUID string, logging it for debugging."""
        name = str(uuid.uuid4())
        log.debug("New cache lock file: {0}", name)
        return name
1087
+
1088
+
1089
class StorageProvider(object):
    """
    Base class for remote artifact storage providers.

    A storage provider uploads and downloads artifacts to and from a
    remote storage location. The location may be a file system path,
    a cloud storage service, or any other type of storage.
    """

    def download(self, artifact: Artifact, force: bool = False) -> bool:
        """
        Download an artifact from the storage location.

        The artifact should be downloaded to the path returned by the
        artifact's :func:`~jolt.Artifact.get_archive_path` method and
        must be in the format specified by DEFAULT_ARCHIVE_TYPE.

        The download should be retried if it fails due to network issues.
        The method may raise an exception on errors.

        Args:
            artifact (Artifact): The artifact to download.
            force (bool, optional): If True, force the download even if
                the artifact is already present locally, or if downloads
                are disabled. The default is False.

        Returns:
            bool: True if the download was successful, False otherwise.

        """
        return False

    def download_enabled(self) -> bool:
        """ Return True if downloading is enabled. Default is True. """
        return True

    def upload(self, artifact: Artifact, force: bool = False) -> bool:
        """
        Upload an artifact to the storage location.

        The artifact to be uploaded is located at the path returned by
        the artifact's :func:`~jolt.Artifact.get_archive_path` method,
        in the format specified by DEFAULT_ARCHIVE_TYPE. The provider
        may store the artifact in a different format, but must be able
        to deliver it again in the DEFAULT_ARCHIVE_TYPE format.

        The upload should be retried if it fails due to network issues.
        The method may raise an exception on errors.

        Args:
            artifact (Artifact): The artifact to upload.
            force (bool, optional): If True, force the upload even if
                the artifact is already present remotely, or if uploads
                are disabled. The default is False.

        Returns:
            bool: True if the upload was successful, False otherwise.

        """
        return False

    def upload_enabled(self) -> bool:
        """ Return True if uploading is enabled. Default is True. """
        return True

    def location(self, artifact) -> str:
        """
        Return the URL of the artifact in the storage location.

        Sometimes used to identify whether an artifact is present in the
        storage location: the URL points to the artifact if present, or
        is an empty string if the artifact is absent.

        Args:
            artifact (Artifact): The artifact to locate.
        """
        return ''  # URL

    def availability(self, artifacts: list) -> tuple:
        """
        Check the availability of a list of artifacts.

        Determines which artifacts are present in the storage location,
        returning the present and missing artifacts as two lists.

        The default implementation calls :func:`~jolt.StorageProvider.location`
        once per artifact. Subclasses may override this method to provide
        a more efficient implementation.

        Args:
            artifacts (list): A list of artifacts to check.

        Returns:
            tuple: A tuple of two lists: the first contains the artifacts
                that are present, the second those that are missing.

        """
        # Accept a single artifact as well as a list.
        artifacts = utils.as_list(artifacts)

        present = set()
        missing = set()

        for artifact in artifacts:
            target = present if self.location(artifact) else missing
            target.add(artifact)

        return list(present), list(missing)
+
1207
+
1208
class StorageProviderFactory(StorageProvider):
    """ A factory for store providers. """

    def create(self) -> StorageProvider:
        """
        Create and return a new storage provider.

        Implementations must return a new instance of a storage provider,
        which must be a subclass of :class:`~jolt.StorageProvider`.

        """
        return None
1220
+
1221
+
1222
def RegisterStorage(cls):
    """ Decorator used to register a storage provider factory.

    Returns the class unchanged so that ``@RegisterStorage`` keeps the
    decorated name bound to the factory class (previously the decorator
    returned None, which rebinds a decorated class name to None).
    """
    ArtifactCache.storage_provider_factories.append(cls)
    return cls
887
1225
 
888
1226
 
889
1227
  @utils.Singleton
@@ -945,14 +1283,18 @@ class ArtifactCache(StorageProvider):
945
1283
 
946
1284
  def __init__(self, options=None, pidprovider=None):
947
1285
  self._options = options or JoltOptions()
948
- self._remote_identity_cache = set()
949
1286
  self._storage_providers = [
950
1287
  factory.create(self)
951
1288
  for factory in ArtifactCache.storage_provider_factories]
952
1289
 
1290
+        # If not every storage provider supports the availability method,
1291
+        # we will not rely solely on the local presence cache.
1292
+ self._remote_presence_cache = set()
1293
+ self._presence_cache_only = self.has_availability()
1294
+
953
1295
  # Read configuration
954
1296
  self._max_size = config.getsize(
955
- "jolt", "cachesize", os.environ.get("JOLT_CACHESIZE", 1 * 1024 ** 3))
1297
+ "jolt", "cachesize", os.environ.get("JOLT_CACHE_SIZE", 1 * 1024 ** 3))
956
1298
 
957
1299
  # Create cache directory
958
1300
  self._fs_create_cachedir()
@@ -961,10 +1303,12 @@ class ArtifactCache(StorageProvider):
961
1303
  self._cache_locked = False
962
1304
  self._lock_file = fasteners.InterProcessLock(self._fs_get_lock_file())
963
1305
  self._thread_lock = RLock()
1306
+ self._artifact_thread_lock = utils.IdLock()
964
1307
 
965
1308
  # Create process lock file
966
1309
  with self._cache_lock():
967
- self._pid = pidprovider() if pidprovider else PidProvider()()
1310
+ self._pid_provider = pidprovider or PidProvider()
1311
+ self._pid = self._pid_provider()
968
1312
  self._pid_file = fasteners.InterProcessLock(self._fs_get_pid_file(self._pid))
969
1313
  self._pid_file.acquire()
970
1314
 
@@ -996,6 +1340,7 @@ class ArtifactCache(StorageProvider):
996
1340
  db = sqlite3.connect(self._db_path, detect_types=sqlite3.PARSE_DECLTYPES)
997
1341
  try:
998
1342
  db.execute("PRAGMA journal_mode=OFF")
1343
+ # db.set_trace_callback(log.warning)
999
1344
  yield db
1000
1345
  finally:
1001
1346
  db.close()
@@ -1017,9 +1362,9 @@ class ArtifactCache(StorageProvider):
1017
1362
  cur.execute("CREATE TABLE IF NOT EXISTS artifact_lockrefs (identity text, pid text)")
1018
1363
  db.commit()
1019
1364
 
1020
- def _db_insert_artifact(self, db, identity, name, size):
1365
+ def _db_insert_artifact(self, db, identity, task_name, size):
1021
1366
  cur = db.cursor()
1022
- cur.execute("INSERT INTO artifacts VALUES (?,?,?,?)", (identity, name, size, datetime.now()))
1367
+ cur.execute("INSERT INTO artifacts VALUES (?,?,?,?)", (identity, task_name, size, datetime.now()))
1023
1368
  db.commit()
1024
1369
 
1025
1370
  def _db_update_artifact_size(self, db, identity, size):
@@ -1182,6 +1527,7 @@ class ArtifactCache(StorageProvider):
1182
1527
 
1183
1528
  def _fs_create_cachedir(self):
1184
1529
  self.root = config.get_cachedir()
1530
+ log.verbose("Jolt cache path: {}", self.root)
1185
1531
  try:
1186
1532
  fs.makedirs(self.root)
1187
1533
  except KeyboardInterrupt as e:
@@ -1189,73 +1535,103 @@ class ArtifactCache(StorageProvider):
1189
1535
  except Exception:
1190
1536
  raise_error("Failed to create cache directory '{0}'", self.root)
1191
1537
 
1192
- def _fs_get_artifact(self, node, tools=None):
1193
- return Artifact(self, node, tools)
1538
+ def _fs_get_artifact(self, node, name, tools=None, session=False):
1539
+ return Artifact(self, node, name=name, tools=tools, session=session)
1194
1540
 
1195
- def _fs_commit_artifact(self, artifact, uploadable):
1541
+ def _fs_commit_artifact(self, artifact: Artifact, uploadable: bool, temporary: bool):
1196
1542
  artifact._set_uploadable(uploadable)
1197
1543
  if not artifact.is_unpackable():
1198
1544
  artifact._set_unpacked()
1199
- artifact._write_manifest()
1200
- if artifact.is_temporary():
1545
+ if temporary:
1546
+ artifact._write_manifest(temporary=True)
1201
1547
  fs.rmtree(artifact.final_path, ignore_errors=True)
1202
- fs.rename(artifact.path, artifact.final_path)
1548
+ fs.rename(artifact.temporary_path, artifact.final_path)
1549
+ else:
1550
+ artifact._write_manifest(temporary=False)
1203
1551
 
1204
1552
  @contextlib.contextmanager
1205
1553
  def _fs_compress_artifact(self, artifact):
1206
- task = artifact.get_task()
1554
+ task = artifact.task
1207
1555
  archive = artifact.get_archive_path()
1208
1556
 
1209
1557
  raise_task_error_if(
1210
1558
  artifact.is_temporary(), task,
1211
- "Can't compress an unpublished task artifact")
1559
+ "Can't compress an unpublished task artifact ({})", artifact._log_name)
1212
1560
 
1213
1561
  try:
1214
- task.tools.archive(artifact.path, archive)
1562
+ artifact.tools.archive(artifact.path, archive)
1215
1563
  except KeyboardInterrupt as e:
1216
1564
  raise e
1217
1565
  except Exception:
1218
- raise_task_error(task, "Failed to compress task artifact")
1566
+ raise_task_error(task, "Failed to compress task artifact ({})", artifact._log_name)
1219
1567
  try:
1220
1568
  yield
1221
1569
  finally:
1222
1570
  fs.unlink(archive, ignore_errors=True)
1223
1571
 
1224
1572
  def _fs_decompress_artifact(self, artifact):
1225
- task = artifact.get_task()
1573
+ task = artifact.task
1226
1574
  archive = artifact.get_archive_path()
1227
1575
  try:
1228
1576
  task.tools.extract(archive, artifact.temporary_path, ignore_owner=True)
1577
+ artifact._read_manifest(temporary=True)
1229
1578
  except KeyboardInterrupt as e:
1579
+ fs.rmtree(artifact.temporary_path, ignore_errors=True)
1230
1580
  raise e
1231
1581
  except Exception:
1232
- raise_task_error(task, "Failed to extract task artifact archive")
1582
+ fs.rmtree(artifact.temporary_path, ignore_errors=True)
1583
+ raise_task_error(task, "Failed to extract task artifact archive ({})", artifact._log_name)
1233
1584
  finally:
1234
1585
  fs.unlink(archive, ignore_errors=True)
1235
- artifact._read_manifest()
1236
1586
 
1237
- def _fs_delete_artifact(self, identity, name, onerror=None):
1238
- fs.rmtree(self._fs_get_artifact_path(identity, name), ignore_errors=True, onerror=onerror)
1239
- fs.rmtree(self._fs_get_artifact_tmppath(identity, name), ignore_errors=True, onerror=onerror)
1240
- fs.unlink(fs.path.join(self.root, name), ignore_errors=True)
1241
-
1242
- def _fs_get_artifact_archivepath(self, identity, name):
1243
- return fs.get_archive(fs.path.join(self.root, name, identity))
1587
+ def _fs_delete_artifact(self, identity, task_name, onerror=None):
1588
+ fs.rmtree(self._fs_get_artifact_path(identity, task_name), ignore_errors=True, onerror=onerror)
1589
+ fs.rmtree(self._fs_get_artifact_tmppath(identity, task_name), ignore_errors=True, onerror=onerror)
1590
+ fs.rmtree(self._fs_get_artifact_path_legacy(identity, task_name), ignore_errors=True, onerror=onerror)
1591
+ fs.rmtree(self._fs_get_artifact_tmppath_legacy(identity, task_name), ignore_errors=True, onerror=onerror)
1592
+ fs.unlink(fs.path.join(self.root, task_name), ignore_errors=True)
1593
+
1594
+ def _fs_identity(self, identity):
1595
+ parts = identity.split("@", 1)
1596
+ if len(parts) <= 1:
1597
+ parts = ["main"] + parts
1598
+ return parts[1] + "-" + utils.canonical(parts[0])
1599
+
1600
+ def _fs_identity_legacy(self, identity):
1601
+ parts = identity.split("@", 1)
1602
+ if len(parts) <= 1:
1603
+ parts = ["main"] + parts
1604
+ return parts[0] + "@" + utils.canonical(parts[1])
1605
+
1606
+ def _fs_get_artifact_archivepath(self, identity, task_name):
1607
+ identity = self._fs_identity(identity)
1608
+ return fs.path.join(self.root, task_name, identity) + DEFAULT_ARCHIVE_TYPE
1244
1609
 
1245
1610
  def _fs_get_artifact_lockpath(self, identity):
1611
+ identity = self._fs_identity(identity)
1246
1612
  return fs.path.join(self.root, "locks", identity + ".lock")
1247
1613
 
1248
- def _fs_get_artifact_tmppath(self, identity, name):
1249
- return fs.path.join(self.root, name, "." + identity)
1614
+ def _fs_get_artifact_tmppath(self, identity, task_name):
1615
+ identity = self._fs_identity(identity)
1616
+ return fs.path.join(self.root, task_name, "." + identity)
1617
+
1618
+ def _fs_get_artifact_path(self, identity, task_name):
1619
+ identity = self._fs_identity(identity)
1620
+ return fs.path.join(self.root, task_name, identity)
1250
1621
 
1251
- def _fs_get_artifact_path(self, identity, name):
1252
- return fs.path.join(self.root, name, identity)
1622
+ def _fs_get_artifact_tmppath_legacy(self, identity, task_name):
1623
+ identity = self._fs_identity_legacy(identity)
1624
+ return fs.path.join(self.root, task_name, "." + identity)
1253
1625
 
1254
- def _fs_get_artifact_manifest_path(self, identity, name):
1255
- return fs.path.join(self._fs_get_artifact_path(identity, name), ".manifest.json")
1626
+ def _fs_get_artifact_path_legacy(self, identity, task_name):
1627
+ identity = self._fs_identity_legacy(identity)
1628
+ return fs.path.join(self.root, task_name, identity)
1256
1629
 
1257
- def _fs_get_artifact_manifest(self, identity, name):
1258
- path = self._fs_get_artifact_manifest_path(identity, name)
1630
+ def _fs_get_artifact_manifest_path(self, identity, task_name):
1631
+ return fs.path.join(self._fs_get_artifact_path(identity, task_name), ".manifest.json")
1632
+
1633
+ def _fs_get_artifact_manifest(self, identity, task_name):
1634
+ path = self._fs_get_artifact_manifest_path(identity, task_name)
1259
1635
  with open(path) as manifest_file:
1260
1636
  return json.load(manifest_file, object_hook=json_deserializer)
1261
1637
 
@@ -1268,9 +1644,9 @@ class ArtifactCache(StorageProvider):
1268
1644
  def _fs_get_pid_file(self, pid):
1269
1645
  return fs.path.join(self.root, "pids", pid)
1270
1646
 
1271
- def _fs_is_artifact_expired(self, identity, name, last_used):
1647
+ def _fs_is_artifact_expired(self, identity, task_name, last_used):
1272
1648
  try:
1273
- manifest = self._fs_get_artifact_manifest(identity, name)
1649
+ manifest = self._fs_get_artifact_manifest(identity, task_name)
1274
1650
  manifest["used"] = last_used
1275
1651
  strategy = ArtifactEvictionStrategyRegister.get().find(
1276
1652
  manifest.get("expires", "immediately"))
@@ -1320,19 +1696,39 @@ class ArtifactCache(StorageProvider):
1320
1696
  """ Discard list of artifacts. Cache lock must be held. """
1321
1697
  self._assert_cache_locked()
1322
1698
  evicted = 0
1323
- for identity, name, _, used in artifacts:
1324
- if not if_expired or self._fs_is_artifact_expired(identity, name, used):
1325
- self._db_delete_artifact(db, identity)
1326
- self._fs_delete_artifact(identity, name, onerror=onerror)
1327
- evicted += 1
1328
- log.debug("Evicted {}: {}", identity, name)
1699
+ for identity, task_name, _, used in artifacts:
1700
+ if not if_expired or self._fs_is_artifact_expired(identity, task_name, used):
1701
+ with utils.delayed_interrupt():
1702
+ self._db_delete_artifact(db, identity)
1703
+ self._fs_delete_artifact(identity, task_name, onerror=onerror)
1704
+ evicted += 1
1705
+ log.debug("Evicted {}: {}", identity, task_name)
1329
1706
  return evicted == len(artifacts)
1330
1707
 
1331
1708
  ############################################################################
1332
1709
  # Public API
1333
1710
  ############################################################################
1334
1711
 
1335
- def is_available_locally(self, node):
1712
+ def release(self):
1713
+ """
1714
+ Release references to artifacts held by the current process.
1715
+
1716
+ Effectively, a new pid lock file is created and the old one is deleted. This
1717
+ allows other processes to detect termination of the current process and
1718
+ garbage collect any references owned by the process.
1719
+ """
1720
+ with self._cache_lock(), self._db() as db:
1721
+ self._db_invalidate_locks(db, try_all=True)
1722
+ self._db_invalidate_references(db, try_all=True)
1723
+ self._fs_invalidate_pids(db, try_all=True)
1724
+ self._pid_file.release()
1725
+
1726
+ self._pid = self._pid_provider()
1727
+ self._pid_file = fasteners.InterProcessLock(self._fs_get_pid_file(self._pid))
1728
+ self._pid_file.acquire()
1729
+
1730
+ @utils.delay_interrupt
1731
+ def is_available_locally(self, artifact):
1336
1732
  """
1337
1733
  Check presence of task artifact in cache.
1338
1734
 
@@ -1340,84 +1736,115 @@ class ArtifactCache(StorageProvider):
1340
1736
  recorded for the running process to prevent eviction by other
1341
1737
  processes.
1342
1738
  """
1343
- if not node.task.is_cacheable():
1739
+ if not artifact.is_cacheable():
1344
1740
  return False
1345
1741
 
1346
- # Cache availability in node
1347
- try:
1348
- return node.__available
1349
- except AttributeError:
1350
- pass
1351
-
1352
1742
  with self._cache_lock(), self._db() as db:
1353
- if self._db_select_artifact(db, node.identity) or self._db_select_reference(db, node.identity):
1354
- with self._fs_get_artifact(node) as a:
1355
- if a.is_temporary():
1356
- self._db_delete_artifact(db, node.identity)
1357
- return False
1358
- self._db_insert_reference(db, node.identity)
1359
- node.__available = True
1743
+ if self._db_select_artifact(db, artifact.identity) or self._db_select_reference(db, artifact.identity):
1744
+ artifact.reload()
1745
+ if artifact.is_temporary():
1746
+ self._db_delete_artifact(db, artifact.identity, and_refs=False)
1747
+ return False
1748
+ self._db_insert_reference(db, artifact.identity)
1360
1749
  return True
1361
1750
  return False
1362
1751
 
1363
- def is_available_remotely(self, node):
1752
+ def is_available_remotely(self, artifact, cache=True):
1364
1753
  """
1365
1754
  Check presence of task artifact in external remote caches.
1366
1755
  """
1367
- if not node.task.is_cacheable():
1368
- return False
1369
- if node.identity in self._remote_identity_cache:
1370
- return True
1756
+ if cache:
1757
+ if artifact.identity in self._remote_presence_cache:
1758
+ return True
1759
+ if self._presence_cache_only:
1760
+ return False
1371
1761
  for provider in self._storage_providers:
1372
- if provider.location(node):
1373
- self._remote_identity_cache.add(node.identity)
1762
+ present, _ = provider.availability([artifact])
1763
+ if present:
1764
+ self._remote_presence_cache.add(artifact.identity)
1374
1765
  return True
1375
1766
  return False
1376
1767
 
1377
- def is_available(self, node):
1768
+ def is_available(self, artifact):
1378
1769
  """ Check presence of task artifact in any cache, local or remote """
1379
- return self.is_available_locally(node) or self.is_available_remotely(node)
1770
+ return self.is_available_locally(artifact) or self.is_available_remotely(artifact)
1771
+
1772
+ def has_availability(self):
1773
+ # Returns true if all storage providers implement the availability method
1774
+ return all([provider.availability.__func__ != StorageProvider.availability for provider in self._storage_providers])
1775
+
1776
+ def availability(self, artifacts, remote=True):
1777
+ """ Check presence of task artifacts in any cache, local or remote """
1778
+ present = set()
1779
+ missing = set()
1780
+
1781
+ # Make sure artifacts is a list
1782
+ artifacts = utils.as_list(artifacts)
1783
+
1784
+ # Check presence of all artifacts in the local cache
1785
+ for artifact in artifacts:
1786
+ if self.is_available_locally(artifact):
1787
+ present.add(artifact)
1788
+ else:
1789
+ missing.add(artifact)
1790
+
1791
+ if not remote:
1792
+ return list(present), list(missing)
1793
+
1794
+ # Check presence of all artifacts in the remote caches
1795
+ missing_remotely = artifacts
1796
+
1797
+ for provider in self._storage_providers:
1798
+ present_in_provider, missing_in_provider = provider.availability(missing_remotely)
1799
+ for artifact in present_in_provider:
1800
+ self._remote_presence_cache.add(artifact.identity)
1801
+ present.update(present_in_provider)
1802
+ missing_remotely = missing_in_provider
1803
+ if not missing_in_provider:
1804
+ break
1380
1805
 
1381
- def is_unpacked(self, node):
1382
- with self.get_artifact(node) as artifact:
1383
- return artifact.is_unpacked()
1806
+ missing.update(missing_remotely)
1807
+ missing = missing - present
1384
1808
 
1385
- def is_uploadable(self, node):
1386
- with self.get_artifact(node) as artifact:
1387
- return artifact.is_uploadable()
1809
+ return list(present), list(missing)
1388
1810
 
1389
1811
  def download_enabled(self):
1390
1812
  return self._options.download and \
1391
1813
  any([provider.download_enabled() for provider in self._storage_providers])
1392
1814
 
1815
+ def download_session_enabled(self):
1816
+ return self._options.download_session and \
1817
+ any([provider.download_enabled() for provider in self._storage_providers])
1818
+
1393
1819
  def upload_enabled(self):
1394
1820
  return self._options.upload and \
1395
1821
  any([provider.upload_enabled() for provider in self._storage_providers])
1396
1822
 
1397
- def download(self, node, force=False):
1823
+ def download(self, artifact, force=False):
1398
1824
  """
1399
1825
  Downloads an artifact from a remote cache to the local cache.
1400
1826
 
1401
1827
  The artifact is interprocess locked during the operation.
1402
1828
  """
1403
- if not force and not self.download_enabled():
1404
- return False
1405
- if not node.task.is_cacheable():
1829
+ if not force:
1830
+ if not artifact.is_session() and not self.download_enabled():
1831
+ return False
1832
+ if artifact.is_session() and not self.download_session_enabled():
1833
+ return False
1834
+ if not artifact.is_cacheable():
1406
1835
  return False
1407
- if not node.is_downloadable():
1408
- return True
1409
- with self.get_locked_artifact(node) as artifact:
1410
- if self.is_available_locally(node):
1411
- node.info("Download skipped, already in local cache")
1836
+ with self.lock_artifact(artifact, why="download") as artifact:
1837
+ if self.is_available_locally(artifact):
1838
+ artifact._info("Download skipped, already in local cache")
1412
1839
  return True
1413
1840
  for provider in self._storage_providers:
1414
- if provider.download(node, force):
1841
+ if provider.download(artifact, force):
1415
1842
  self._fs_decompress_artifact(artifact)
1416
- self.commit(artifact)
1843
+ self.commit(artifact, temporary=True)
1417
1844
  return True
1418
1845
  return len(self._storage_providers) == 0
1419
1846
 
1420
- def upload(self, node, force=False, locked=True):
1847
+ def upload(self, artifact, force=False, locked=True):
1421
1848
  """
1422
1849
  Uploads an artifact from the local cache to all configured remote caches.
1423
1850
 
@@ -1425,30 +1852,28 @@ class ArtifactCache(StorageProvider):
1425
1852
  """
1426
1853
  if not force and not self.upload_enabled():
1427
1854
  return False
1428
- if not node.task.is_cacheable():
1855
+ if not artifact.is_cacheable():
1429
1856
  return True
1430
1857
  raise_task_error_if(
1431
- not self.is_available_locally(node), node,
1432
- "Can't upload task artifact, no artifact present in the local cache")
1433
- with self.get_locked_artifact(node) if locked else self.get_artifact(node) as artifact:
1858
+ not self.is_available_locally(artifact), artifact.task,
1859
+ "Can't upload task artifact, no artifact present in the local cache ({})", artifact._log_name)
1860
+ with self.lock_artifact(artifact, why="upload") if locked else artifact as artifact:
1434
1861
  raise_task_error_if(
1435
- not artifact.is_uploadable(), node,
1436
- "Artifact was modified locally by another process and can no longer be uploaded, try again")
1862
+ not artifact.is_uploadable(), artifact.task,
1863
+ "Artifact was modified locally by another process and can no longer be uploaded, try again ({})", artifact._log_name)
1437
1864
  if self._storage_providers:
1438
1865
  with self._fs_compress_artifact(artifact):
1439
- return all([provider.upload(node, force) for provider in self._storage_providers])
1866
+ return all([provider.upload(artifact, force) for provider in self._storage_providers])
1440
1867
  return len(self._storage_providers) == 0
1441
1868
 
1442
- def location(self, node):
1443
- if not node.task.is_cacheable():
1444
- return ''
1869
+ def location(self, artifact):
1445
1870
  for provider in self._storage_providers:
1446
- url = provider.location(node)
1871
+ url = provider.location(artifact)
1447
1872
  if url:
1448
1873
  return url
1449
1874
  return ''
1450
1875
 
1451
- def unpack(self, node):
1876
+ def unpack(self, artifact):
1452
1877
  """
1453
1878
  Unpacks/relocates the task artifact to the local cache.
1454
1879
 
@@ -1460,40 +1885,59 @@ class ArtifactCache(StorageProvider):
1460
1885
 
1461
1886
  The artifact is interprocess locked during the operation.
1462
1887
  """
1463
- if not node.task.is_cacheable():
1464
- return False
1465
- if not node.is_unpackable():
1888
+ if not artifact.is_unpackable():
1466
1889
  return True
1467
- with self._thread_lock, self.get_locked_artifact(node) as artifact:
1468
- if not self.is_available_locally(node):
1469
- raise_task_error(node, "Locked artifact is missing in cache (forcibly removed?)")
1890
+ with self._thread_lock, self.lock_artifact(artifact, why="unpack") as artifact:
1891
+ raise_task_error_if(
1892
+ not self.is_available_locally(artifact),
1893
+ artifact.task,
1894
+ "Locked artifact is missing in cache (forcibly removed?) ({})", artifact._log_name)
1895
+
1896
+ raise_task_error_if(
1897
+ artifact.is_temporary(),
1898
+ artifact.task,
1899
+ "Can't unpack an unpublished task artifact ({})", artifact._log_name)
1900
+
1470
1901
  if artifact.is_unpacked():
1471
1902
  return True
1472
1903
 
1473
1904
  # Keep a temporary copy of the artifact if the task
1474
1905
  # unpack() method fails. The copy is removed in
1475
1906
  # get_locked_artifact() if left unused.
1476
- fs.copy(artifact.path, artifact.temporary_path, symlinks=True)
1907
+ fs.copy(artifact.final_path, artifact.temporary_path, symlinks=True)
1477
1908
 
1478
- task = artifact.get_task()
1909
+ task = artifact.task
1479
1910
  with tools.Tools(task) as t:
1480
1911
  try:
1481
1912
  # Note: unpack() will run on the original
1482
1913
  # artifact, not in the temporary copy.
1483
- node.verbose("Unpacking")
1914
+ if task.unpack.__func__ is not tasks.Task.unpack:
1915
+ artifact._info("Unpack started")
1484
1916
  artifact._set_unpacked()
1485
- task.unpack(artifact, t)
1486
- self.commit(artifact, uploadable=False)
1917
+ if artifact.name == "main":
1918
+ task.unpack(artifact, t)
1919
+ else:
1920
+ unpack = getattr(task, "unpack_" + artifact.name, None)
1921
+ raise_task_error_if(
1922
+ unpack is None, task,
1923
+ "Artifact unpack method not found: unpack_{}", artifact.name)
1924
+ unpack(artifact, t)
1925
+
1926
+ self.commit(artifact, uploadable=False, temporary=False)
1927
+
1487
1928
  except NotImplementedError:
1488
- self.commit(artifact)
1489
- except Exception as e:
1929
+ self.commit(artifact, temporary=False)
1930
+
1931
+ except (Exception, KeyboardInterrupt) as e:
1490
1932
  # Restore the temporary copy
1491
- fs.rmtree(artifact.path, ignore_errors=True)
1492
- fs.rename(artifact.temporary_path, artifact.path)
1933
+ fs.rmtree(artifact.final_path, ignore_errors=True)
1934
+ fs.rename(artifact.temporary_path, artifact.final_path)
1935
+ artifact._error("Unpack failed")
1493
1936
  raise e
1494
1937
  return True
1495
1938
 
1496
- def commit(self, artifact, uploadable=True):
1939
+ @utils.delay_interrupt
1940
+ def commit(self, artifact, uploadable=True, temporary=True):
1497
1941
  """
1498
1942
  Commits a task artifact to the cache.
1499
1943
 
@@ -1505,14 +1949,16 @@ class ArtifactCache(StorageProvider):
1505
1949
  take place if the resulting cache size exceeds the configured
1506
1950
  limit.
1507
1951
  """
1508
- if not artifact.get_task().is_cacheable():
1952
+ if not artifact.is_cacheable():
1509
1953
  return
1954
+
1510
1955
  with self._cache_lock(), self._db() as db:
1511
- self._fs_commit_artifact(artifact, uploadable)
1956
+ self._fs_commit_artifact(artifact, uploadable, temporary)
1512
1957
  with utils.ignore_exception(): # Possibly already exists in DB, e.g. unpacked
1513
- self._db_insert_artifact(db, artifact.get_task().identity, artifact.get_task().canonical_name, artifact.get_size())
1514
- self._db_update_artifact_size(db, artifact.get_task().identity, artifact.get_size())
1515
- self._db_insert_reference(db, artifact.get_task().identity)
1958
+ self._db_insert_artifact(db, artifact.identity, artifact.task.canonical_name, artifact.get_size())
1959
+ self._db_update_artifact_size(db, artifact.identity, artifact.get_size())
1960
+ self._db_insert_reference(db, artifact.identity)
1961
+ artifact.reload()
1516
1962
 
1517
1963
  evict_size = self._db_select_sum_artifact_size(db) - self._max_size
1518
1964
  if evict_size < 0:
@@ -1524,21 +1970,19 @@ class ArtifactCache(StorageProvider):
1524
1970
  if self._discard(db, [candidate], True):
1525
1971
  evict_size -= candidate[2]
1526
1972
 
1527
- def discard(self, node, if_expired=False, onerror=None):
1973
+ @utils.delay_interrupt
1974
+ def discard(self, artifact, if_expired=False, onerror=None):
1528
1975
  with self._cache_lock(), self._db() as db:
1529
1976
  self._db_invalidate_locks(db)
1530
1977
  self._db_invalidate_references(db)
1531
1978
  self._fs_invalidate_pids(db)
1532
- discarded = self._discard(
1979
+ return self._discard(
1533
1980
  db,
1534
- self._db_select_artifact_not_in_use(db, node.identity),
1981
+ self._db_select_artifact_not_in_use(db, artifact.identity),
1535
1982
  if_expired,
1536
1983
  onerror=onerror)
1537
- if discarded and hasattr(node, "_ArtifactCache__available"):
1538
- del node.__available
1539
- return discarded
1540
1984
 
1541
- def _discard_wait(self, node):
1985
+ def _discard_wait(self, artifact):
1542
1986
  """
1543
1987
  Discards an artifact without expiration consideration.
1544
1988
 
@@ -1552,13 +1996,14 @@ class ArtifactCache(StorageProvider):
1552
1996
  self._db_invalidate_locks(db)
1553
1997
  self._db_invalidate_references(db)
1554
1998
  self._fs_invalidate_pids(db)
1555
- artifacts = self._db_select_artifact(db, node.identity)
1556
- self._db_delete_artifact(db, node.identity, and_refs=False)
1557
- refpids = self._db_select_artifact_reference_pids(db, node.identity)
1558
- lockpids = self._db_select_artifact_lock_pids(db, node.identity)
1559
-
1560
- if len(refpids) > 1:
1561
- node.info("Artifact is temporarily in use, forced discard on hold")
1999
+ artifacts = self._db_select_artifact(db, artifact.identity)
2000
+ self._db_delete_artifact(db, artifact.identity, and_refs=False)
2001
+ refpids = self._db_select_artifact_reference_pids(db, artifact.identity)
2002
+ refpids = list(filter(lambda pid: pid != self._pid, refpids))
2003
+ lockpids = self._db_select_artifact_lock_pids(db, artifact.identity)
2004
+
2005
+ if len(refpids) > 0:
2006
+ artifact._info("Artifact is temporarily in use, forced discard on hold")
1562
2007
  for pid in refpids:
1563
2008
  # Loop waiting for other processes to surrender the artifact
1564
2009
  while True:
@@ -1574,13 +2019,12 @@ class ArtifactCache(StorageProvider):
1574
2019
  break
1575
2020
  except RuntimeError:
1576
2021
  with self._cache_lock(), self._db() as db:
1577
- lockpids = self._db_select_artifact_lock_pids(db, node.identity)
2022
+ lockpids = self._db_select_artifact_lock_pids(db, artifact.identity)
1578
2023
 
1579
2024
  with self._cache_lock(), self._db() as db:
1580
2025
  assert self._discard(db, artifacts, False), "Failed to discard artifact"
1581
- if hasattr(node, "_ArtifactCache__available"):
1582
- del node.__available
1583
- return self._fs_get_artifact(node)
2026
+ artifact.reset()
2027
+ return artifact
1584
2028
 
1585
2029
  def discard_all(self, if_expired=False, onerror=None):
1586
2030
  with self._cache_lock(), self._db() as db:
@@ -1596,11 +2040,30 @@ class ArtifactCache(StorageProvider):
1596
2040
  def get_context(self, node):
1597
2041
  return Context(self, node)
1598
2042
 
1599
- def get_artifact(self, node, tools=None):
1600
- return self._fs_get_artifact(node, tools)
2043
+ def get_artifact(self, node, name, tools=None, session=False):
2044
+ artifact = self._fs_get_artifact(node, name=name, tools=tools, session=session)
2045
+ if not artifact.is_temporary():
2046
+ with self._cache_lock(), self._db() as db:
2047
+ if not self._db_select_artifact(db, artifact.identity) and not self._db_select_reference(db, artifact.identity):
2048
+ log.verbose("Artifact not present in db, discarding archive ({} )", artifact.task.short_qualified_name, artifact.identity)
2049
+ fs.rmtree(artifact.final_path, ignore_errors=True)
2050
+ artifact.reload()
2051
+ return artifact
2052
+
2053
+ @contextlib.contextmanager
2054
+ def lock_artifact(self, artifact: Artifact, discard: bool = False, why: str = "publish"):
2055
+ """
2056
+ Locks the task artifact, both with process thread locks and interprocess file locks.
2057
+ """
2058
+ try:
2059
+ self._artifact_thread_lock.acquire(artifact.identity)
2060
+ with self._lock_artifact_interprocess(artifact, discard=discard, why=why) as artifact:
2061
+ yield artifact
2062
+ finally:
2063
+ self._artifact_thread_lock.release(artifact.identity)
1601
2064
 
1602
2065
  @contextlib.contextmanager
1603
- def get_locked_artifact(self, node, discard=False):
2066
+ def _lock_artifact_interprocess(self, artifact: Artifact, discard: bool = False, why: str = "publish"):
1604
2067
  """
1605
2068
  Locks the task artifact.
1606
2069
 
@@ -1610,35 +2073,42 @@ class ArtifactCache(StorageProvider):
1610
2073
  """
1611
2074
  with self._cache_lock():
1612
2075
  with self._db() as db:
1613
- self._db_insert_lock(db, node.identity)
1614
- self._db_insert_reference(db, node.identity)
1615
- lock_path = self._fs_get_artifact_lockpath(node.identity)
2076
+ self._db_insert_lock(db, artifact.identity)
2077
+ self._db_insert_reference(db, artifact.identity)
2078
+ lock_path = self._fs_get_artifact_lockpath(artifact.identity)
1616
2079
  lock = fasteners.InterProcessLock(lock_path)
1617
2080
  is_locked = lock.acquire(blocking=False)
1618
2081
  if not is_locked:
1619
- node.info("Artifact is temporarily locked by another process")
2082
+ artifact._info("Artifact is temporarily locked by another process")
1620
2083
  lock.acquire()
1621
2084
 
2085
+ artifact._debug("Artifact locked for {}", why)
2086
+
1622
2087
  try:
1623
- artifact = self.get_artifact(node)
1624
2088
  if discard:
1625
- artifact = self._discard_wait(node)
2089
+ artifact = self._discard_wait(artifact)
2090
+ else:
2091
+ artifact.reload()
2092
+
1626
2093
  if artifact.is_temporary():
1627
2094
  fs.rmtree(artifact.temporary_path, ignore_errors=True)
1628
2095
  fs.makedirs(artifact.temporary_path)
1629
2096
 
1630
- with contextlib.ExitStack() as stack:
1631
- stack.enter_context(artifact)
1632
- yield artifact
2097
+ yield artifact
1633
2098
  finally:
2099
+ artifact._debug("Artifact unlocked for {}", why)
1634
2100
  fs.rmtree(artifact.temporary_path, ignore_errors=True)
1635
2101
  with self._cache_lock():
1636
2102
  with self._db() as db:
1637
- self._db_delete_lock(db, node.identity)
2103
+ self._db_delete_lock(db, artifact.identity)
1638
2104
  lock.release()
1639
2105
  with self._db() as db:
1640
- if self._db_select_lock_count(db, node.identity) == 0:
2106
+ if self._db_select_lock_count(db, artifact.identity) == 0:
1641
2107
  fs.unlink(lock_path, ignore_errors=True)
1642
2108
 
1643
- def get_path(self, node):
1644
- return self._fs_get_artifact_path(node.identity, node.canonical_name)
2109
+ def precheck(self, artifacts, remote=True):
2110
+ """ Precheck artifacts for availability and cache status. """
2111
+ if not self.has_availability():
2112
+ return
2113
+ present, missing = self.availability(artifacts, remote=remote)
2114
+ log.verbose("Cache: {}/{} artifacts present", len(present), len(artifacts))