jolt 0.9.172__py3-none-any.whl → 0.9.435__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (185) hide show
  1. jolt/__init__.py +80 -7
  2. jolt/__main__.py +9 -1
  3. jolt/bin/fstree-darwin-x86_64 +0 -0
  4. jolt/bin/fstree-linux-x86_64 +0 -0
  5. jolt/cache.py +596 -252
  6. jolt/chroot.py +36 -11
  7. jolt/cli.py +143 -130
  8. jolt/common_pb2.py +45 -45
  9. jolt/config.py +76 -40
  10. jolt/error.py +19 -4
  11. jolt/filesystem.py +2 -6
  12. jolt/graph.py +400 -82
  13. jolt/influence.py +110 -3
  14. jolt/loader.py +338 -174
  15. jolt/log.py +127 -31
  16. jolt/manifest.py +13 -46
  17. jolt/options.py +35 -11
  18. jolt/pkgs/abseil.py +42 -0
  19. jolt/pkgs/asio.py +25 -0
  20. jolt/pkgs/autoconf.py +41 -0
  21. jolt/pkgs/automake.py +41 -0
  22. jolt/pkgs/b2.py +31 -0
  23. jolt/pkgs/boost.py +111 -0
  24. jolt/pkgs/boringssl.py +32 -0
  25. jolt/pkgs/busybox.py +39 -0
  26. jolt/pkgs/bzip2.py +43 -0
  27. jolt/pkgs/cares.py +29 -0
  28. jolt/pkgs/catch2.py +36 -0
  29. jolt/pkgs/cbindgen.py +17 -0
  30. jolt/pkgs/cista.py +19 -0
  31. jolt/pkgs/clang.py +44 -0
  32. jolt/pkgs/cli11.py +24 -0
  33. jolt/pkgs/cmake.py +48 -0
  34. jolt/pkgs/cpython.py +196 -0
  35. jolt/pkgs/crun.py +29 -0
  36. jolt/pkgs/curl.py +38 -0
  37. jolt/pkgs/dbus.py +18 -0
  38. jolt/pkgs/double_conversion.py +24 -0
  39. jolt/pkgs/fastfloat.py +21 -0
  40. jolt/pkgs/ffmpeg.py +28 -0
  41. jolt/pkgs/flatbuffers.py +29 -0
  42. jolt/pkgs/fmt.py +27 -0
  43. jolt/pkgs/fstree.py +20 -0
  44. jolt/pkgs/gflags.py +18 -0
  45. jolt/pkgs/glib.py +18 -0
  46. jolt/pkgs/glog.py +25 -0
  47. jolt/pkgs/glslang.py +21 -0
  48. jolt/pkgs/golang.py +16 -11
  49. jolt/pkgs/googlebenchmark.py +18 -0
  50. jolt/pkgs/googletest.py +46 -0
  51. jolt/pkgs/gperf.py +15 -0
  52. jolt/pkgs/grpc.py +73 -0
  53. jolt/pkgs/hdf5.py +19 -0
  54. jolt/pkgs/help2man.py +14 -0
  55. jolt/pkgs/inja.py +28 -0
  56. jolt/pkgs/jsoncpp.py +31 -0
  57. jolt/pkgs/libarchive.py +43 -0
  58. jolt/pkgs/libcap.py +44 -0
  59. jolt/pkgs/libdrm.py +44 -0
  60. jolt/pkgs/libedit.py +42 -0
  61. jolt/pkgs/libevent.py +31 -0
  62. jolt/pkgs/libexpat.py +27 -0
  63. jolt/pkgs/libfastjson.py +21 -0
  64. jolt/pkgs/libffi.py +16 -0
  65. jolt/pkgs/libglvnd.py +30 -0
  66. jolt/pkgs/libogg.py +28 -0
  67. jolt/pkgs/libpciaccess.py +18 -0
  68. jolt/pkgs/libseccomp.py +21 -0
  69. jolt/pkgs/libtirpc.py +24 -0
  70. jolt/pkgs/libtool.py +42 -0
  71. jolt/pkgs/libunwind.py +35 -0
  72. jolt/pkgs/libva.py +18 -0
  73. jolt/pkgs/libvorbis.py +33 -0
  74. jolt/pkgs/libxml2.py +35 -0
  75. jolt/pkgs/libxslt.py +17 -0
  76. jolt/pkgs/libyajl.py +16 -0
  77. jolt/pkgs/llvm.py +81 -0
  78. jolt/pkgs/lua.py +54 -0
  79. jolt/pkgs/lz4.py +26 -0
  80. jolt/pkgs/m4.py +14 -0
  81. jolt/pkgs/make.py +17 -0
  82. jolt/pkgs/mesa.py +81 -0
  83. jolt/pkgs/meson.py +17 -0
  84. jolt/pkgs/mstch.py +28 -0
  85. jolt/pkgs/mysql.py +60 -0
  86. jolt/pkgs/nasm.py +49 -0
  87. jolt/pkgs/ncurses.py +30 -0
  88. jolt/pkgs/ng_log.py +25 -0
  89. jolt/pkgs/ninja.py +45 -0
  90. jolt/pkgs/nlohmann_json.py +25 -0
  91. jolt/pkgs/nodejs.py +19 -11
  92. jolt/pkgs/opencv.py +24 -0
  93. jolt/pkgs/openjdk.py +26 -0
  94. jolt/pkgs/openssl.py +103 -0
  95. jolt/pkgs/paho.py +76 -0
  96. jolt/pkgs/patchelf.py +16 -0
  97. jolt/pkgs/perl.py +42 -0
  98. jolt/pkgs/pkgconfig.py +64 -0
  99. jolt/pkgs/poco.py +39 -0
  100. jolt/pkgs/protobuf.py +77 -0
  101. jolt/pkgs/pugixml.py +27 -0
  102. jolt/pkgs/python.py +19 -0
  103. jolt/pkgs/qt.py +35 -0
  104. jolt/pkgs/rapidjson.py +26 -0
  105. jolt/pkgs/rapidyaml.py +28 -0
  106. jolt/pkgs/re2.py +30 -0
  107. jolt/pkgs/re2c.py +17 -0
  108. jolt/pkgs/readline.py +15 -0
  109. jolt/pkgs/rust.py +41 -0
  110. jolt/pkgs/sdl.py +28 -0
  111. jolt/pkgs/simdjson.py +27 -0
  112. jolt/pkgs/soci.py +46 -0
  113. jolt/pkgs/spdlog.py +29 -0
  114. jolt/pkgs/spirv_llvm.py +21 -0
  115. jolt/pkgs/spirv_tools.py +24 -0
  116. jolt/pkgs/sqlite.py +83 -0
  117. jolt/pkgs/ssl.py +12 -0
  118. jolt/pkgs/texinfo.py +15 -0
  119. jolt/pkgs/tomlplusplus.py +22 -0
  120. jolt/pkgs/wayland.py +26 -0
  121. jolt/pkgs/x11.py +58 -0
  122. jolt/pkgs/xerces_c.py +20 -0
  123. jolt/pkgs/xorg.py +360 -0
  124. jolt/pkgs/xz.py +29 -0
  125. jolt/pkgs/yamlcpp.py +30 -0
  126. jolt/pkgs/zeromq.py +47 -0
  127. jolt/pkgs/zlib.py +87 -0
  128. jolt/pkgs/zstd.py +33 -0
  129. jolt/plugins/alias.py +3 -0
  130. jolt/plugins/allure.py +2 -2
  131. jolt/plugins/autotools.py +66 -0
  132. jolt/plugins/cache.py +1 -1
  133. jolt/plugins/cmake.py +74 -6
  134. jolt/plugins/conan.py +238 -0
  135. jolt/plugins/cxxinfo.py +7 -0
  136. jolt/plugins/docker.py +76 -19
  137. jolt/plugins/email.xslt +141 -118
  138. jolt/plugins/environ.py +11 -0
  139. jolt/plugins/fetch.py +141 -0
  140. jolt/plugins/gdb.py +33 -14
  141. jolt/plugins/gerrit.py +0 -13
  142. jolt/plugins/git.py +248 -66
  143. jolt/plugins/googletest.py +1 -1
  144. jolt/plugins/http.py +1 -1
  145. jolt/plugins/libtool.py +63 -0
  146. jolt/plugins/linux.py +990 -0
  147. jolt/plugins/logstash.py +4 -4
  148. jolt/plugins/meson.py +61 -0
  149. jolt/plugins/ninja-compdb.py +96 -28
  150. jolt/plugins/ninja.py +424 -150
  151. jolt/plugins/paths.py +11 -1
  152. jolt/plugins/pkgconfig.py +219 -0
  153. jolt/plugins/podman.py +131 -87
  154. jolt/plugins/python.py +137 -0
  155. jolt/plugins/remote_execution/administration_pb2.py +27 -19
  156. jolt/plugins/remote_execution/log_pb2.py +12 -12
  157. jolt/plugins/remote_execution/scheduler_pb2.py +23 -23
  158. jolt/plugins/remote_execution/worker_pb2.py +19 -19
  159. jolt/plugins/report.py +7 -2
  160. jolt/plugins/rust.py +25 -0
  161. jolt/plugins/scheduler.py +135 -86
  162. jolt/plugins/selfdeploy/setup.py +6 -6
  163. jolt/plugins/selfdeploy.py +49 -31
  164. jolt/plugins/strings.py +35 -22
  165. jolt/plugins/symlinks.py +11 -4
  166. jolt/plugins/telemetry.py +1 -2
  167. jolt/plugins/timeline.py +13 -3
  168. jolt/scheduler.py +467 -165
  169. jolt/tasks.py +427 -111
  170. jolt/templates/timeline.html.template +44 -47
  171. jolt/timer.py +22 -0
  172. jolt/tools.py +527 -188
  173. jolt/utils.py +183 -3
  174. jolt/version.py +1 -1
  175. jolt/xmldom.py +12 -2
  176. {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/METADATA +97 -41
  177. jolt-0.9.435.dist-info/RECORD +207 -0
  178. {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/WHEEL +1 -1
  179. jolt/plugins/amqp.py +0 -855
  180. jolt/plugins/debian.py +0 -338
  181. jolt/plugins/repo.py +0 -253
  182. jolt/plugins/snap.py +0 -122
  183. jolt-0.9.172.dist-info/RECORD +0 -92
  184. {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/entry_points.txt +0 -0
  185. {jolt-0.9.172.dist-info → jolt-0.9.435.dist-info}/top_level.txt +0 -0
jolt/cache.py CHANGED
@@ -23,7 +23,7 @@ from jolt.error import raise_task_error, raise_task_error_if
23
23
  from jolt.expires import ArtifactEvictionStrategyRegister
24
24
 
25
25
 
26
- DEFAULT_ARCHIVE_TYPE = ".tar.gz"
26
+ DEFAULT_ARCHIVE_TYPE = ".tar.zst"
27
27
 
28
28
 
29
29
  def locked(func):
@@ -33,194 +33,122 @@ def locked(func):
33
33
  return _f
34
34
 
35
35
 
36
- class StorageProvider(object):
37
- def download(self, artifact, force=False):
38
- return False
39
-
40
- def download_enabled(self):
41
- return True
42
-
43
- def upload(self, artifact, force=False):
44
- return False
45
-
46
- def upload_enabled(self):
47
- return True
48
-
49
- def location(self, artifact):
50
- return '' # URL
51
-
52
- def availability(self, artifacts):
53
- # Ensure artifacts is a list
54
- artifacts = utils.as_list(artifacts)
55
-
56
- present = set()
57
- missing = set()
58
-
59
- for artifact in artifacts:
60
- if self.location(artifact):
61
- present.add(artifact)
62
- else:
63
- missing.add(artifact)
64
-
65
- return list(present), list(missing)
66
-
67
-
68
- class StorageProviderFactory(StorageProvider):
69
- def create(self):
70
- pass
71
-
72
-
73
- def RegisterStorage(cls):
74
- ArtifactCache.storage_provider_factories.append(cls)
75
-
76
-
77
- class ArtifactAttributeSet(object):
78
- def __init__(self):
79
- super(ArtifactAttributeSet, self).__setattr__("_attributes", {})
80
-
81
- def _get_attributes(self):
82
- return self._attributes
83
-
84
- def __getattr__(self, name):
85
- attributes = self._get_attributes()
86
- if name not in attributes:
87
- attributes[name] = self.create(name)
88
- return attributes[name]
89
-
90
- def __setattr__(self, name, value):
91
- attributes = self._get_attributes()
92
- if name not in attributes:
93
- attributes[name] = self.create(name)
94
- attributes[name].set_value(value)
95
- return attributes[name]
96
-
97
- def __dict__(self):
98
- return {key: str(value) for key, value in self.items()}
99
-
100
- def items(self):
101
- return self._get_attributes().items()
102
-
103
- def apply(self, task, artifact):
104
- for _, value in self.items():
105
- value.apply(task, artifact)
106
-
107
- def apply_deps(self, task, deps):
108
- pass
109
-
110
- def unapply(self, task, artifact):
111
- for _, value in self.items():
112
- value.unapply(task, artifact)
113
-
114
- def unapply_deps(self, task, deps):
115
- pass
116
-
117
- def visit(self, task, artifact, visitor):
118
- for _, value in self.items():
119
- value.visit(task, artifact, visitor)
120
-
121
-
122
36
  class ArtifactAttributeSetRegistry(object):
123
- providers = []
37
+ """
38
+ Registry for providers of artifact attribute sets.
39
+ """
40
+
41
+ providers = [] # List of objects that implement ArtifactAttributeSetProvider
124
42
 
125
43
  @staticmethod
126
44
  def create_all(artifact):
45
+ """ Create all artifact attribute sets. """
127
46
  for provider in ArtifactAttributeSetRegistry.providers:
128
47
  provider().create(artifact)
129
48
 
130
49
  @staticmethod
131
50
  def parse_all(artifact, content):
51
+ """ Parse all artifact attribute sets. """
132
52
  for provider in ArtifactAttributeSetRegistry.providers:
133
53
  provider().parse(artifact, content)
134
54
 
135
55
  @staticmethod
136
56
  def format_all(artifact, content):
57
+ """ Format all artifact attribute sets. """
137
58
  for provider in ArtifactAttributeSetRegistry.providers:
138
59
  provider().format(artifact, content)
139
60
 
140
61
  @staticmethod
141
62
  def apply_all(task, artifact):
63
+ """ Apply all artifact attribute sets. """
142
64
  for provider in ArtifactAttributeSetRegistry.providers:
143
65
  provider().apply(task, artifact)
144
66
 
145
- @staticmethod
146
- def apply_all_deps(task, deps):
147
- for provider in ArtifactAttributeSetRegistry.providers:
148
- provider().apply_deps(task, deps)
149
-
150
67
  @staticmethod
151
68
  def unapply_all(task, artifact):
69
+ """ Unapply all artifact attribute sets. """
152
70
  for provider in ArtifactAttributeSetRegistry.providers:
153
71
  provider().unapply(task, artifact)
154
72
 
155
- @staticmethod
156
- def unapply_all_deps(task, deps):
157
- for provider in ArtifactAttributeSetRegistry.providers:
158
- provider().unapply_deps(task, deps)
159
-
160
73
  @staticmethod
161
74
  def visit_all(task, artifact, visitor):
75
+ """ Visit all artifact attribute sets. """
162
76
  for provider in ArtifactAttributeSetRegistry.providers:
163
77
  provider().visit(task, artifact, visitor)
164
78
 
165
79
 
166
- def visit_artifact(task, artifact, visitor):
167
- ArtifactAttributeSetRegistry.visit_all(task, artifact, visitor)
168
-
169
-
170
- class ArtifactAttributeSetProvider(object):
171
- @staticmethod
172
- def Register(cls):
173
- ArtifactAttributeSetRegistry.providers.append(cls)
174
-
175
- def create(self, artifact):
176
- raise NotImplementedError()
177
-
178
- def parse(self, artifact, content):
179
- raise NotImplementedError()
180
-
181
- def format(self, artifact, content):
182
- raise NotImplementedError()
183
-
184
- def apply(self, task, artifact):
185
- pass
186
-
187
- def apply_deps(self, task, deps):
188
- pass
189
-
190
- def unapply(self, task, artifact):
191
- pass
192
-
193
- def unapply_deps(self, task, deps):
194
- pass
80
+ class ArtifactAttribute(object):
81
+ """
82
+ An artifact attribute.
195
83
 
196
- def visit(self, task, artifact, visitor):
197
- pass
84
+ An artifact attribute is a key-value pair that can be set and retrieved
85
+ from an artifact attribute set. Attributes are used to store metadata and other
86
+ information that is associated with an artifact. They communicate information
87
+ between tasks and store information that is used by tasks when they consume an artifact.
198
88
 
89
+ Artifact attributes can also perform actions when the artifact is consumed.
199
90
 
200
- class ArtifactAttribute(object):
91
+ """
201
92
  def __init__(self, name):
202
93
  self._name = name
203
94
 
204
95
  def get_name(self):
96
+ """ Get the name of the attribute. """
205
97
  return self._name
206
98
 
207
99
  def set_value(self, value, expand=True):
100
+ """
101
+ Set the value of the attribute.
102
+
103
+ Must be implemented by subclasses.
104
+
105
+ Args:
106
+ value: The value to set.
107
+ expand: If True, the value is macro expanded using the tools.expand() method.
108
+ """
208
109
  raise NotImplementedError()
209
110
 
210
111
  def get_value(self):
112
+ """
113
+ Get the value of the attribute.
114
+
115
+ Must be implemented by subclasses.
116
+ """
211
117
  raise NotImplementedError()
212
118
 
213
119
  def apply(self, task, artifact):
120
+ """
121
+ Perform an action when the artifact is being used.
122
+
123
+ Args:
124
+ task (Task): The task that is using the artifact.
125
+ artifact (Artifact): The artifact that is being used.
126
+
127
+ """
214
128
  pass
215
129
 
216
130
  def unapply(self, task, artifact):
131
+ """
132
+ Undo an action when the artifact is no longer being used.
133
+
134
+ Args:
135
+ task (Task): The task that is no longer using the artifact.
136
+ artifact (Artifact): The artifact that is no longer being used.
137
+ """
217
138
  pass
218
139
 
219
- def __str__(self):
140
+ def __str__(self) -> str:
141
+ """
142
+ Get a string representation of the attribute.
143
+
144
+ Must be implemented by subclasses.
145
+ """
220
146
  raise NotImplementedError()
221
147
 
222
148
 
223
149
  class ArtifactStringAttribute(ArtifactAttribute):
150
+ """ An artifact attribute that stores a string value. """
151
+
224
152
  def __init__(self, artifact, name):
225
153
  self._artifact = artifact
226
154
  self._name = name
@@ -235,22 +163,27 @@ class ArtifactStringAttribute(ArtifactAttribute):
235
163
  def get_value(self):
236
164
  return self._value
237
165
 
238
- def apply(self, task, artifact):
239
- pass
240
-
241
- def unapply(self, task, artifact):
242
- pass
243
-
244
- def __str__(self):
166
+ def __str__(self) -> str:
245
167
  return str(self._value)
246
168
 
247
169
 
248
170
  class ArtifactListAttribute(ArtifactAttribute):
171
+ """ An artifact attribute that stores a list of values. """
172
+
249
173
  def __init__(self, artifact, name):
250
174
  self._artifact = artifact
251
175
  self._name = name
252
176
  self._value = []
253
177
 
178
+ def __getitem__(self, key):
179
+ return self._value[key]
180
+
181
+ def __getslice__(self, i, j):
182
+ return self._value[i:j]
183
+
184
+ def __len__(self):
185
+ return len(self._value)
186
+
254
187
  def get_name(self):
255
188
  return self._name
256
189
 
@@ -281,23 +214,16 @@ class ArtifactListAttribute(ArtifactAttribute):
281
214
  def count(self):
282
215
  return len(self.items())
283
216
 
284
- def apply(self, task, artifact):
285
- pass
286
-
287
- def unapply(self, task, artifact):
288
- pass
217
+ def __str__(self) -> str:
218
+ return fs.pathsep.join(str(v) for v in self._value)
289
219
 
290
220
 
291
221
  class ArtifactFileAttribute(object):
222
+ """ An attribute that stores a list of source and destination path tuples for files collected into the artifact. """
223
+
292
224
  def __init__(self):
293
225
  self._files = []
294
226
 
295
- def apply(self, task, artifact):
296
- pass
297
-
298
- def unapply(self, task, artifact):
299
- pass
300
-
301
227
  def append(self, src, dst):
302
228
  self._files.append((fs.as_posix(src), fs.as_posix(dst)))
303
229
 
@@ -308,8 +234,145 @@ class ArtifactFileAttribute(object):
308
234
  return self._files
309
235
 
310
236
 
237
+ class ArtifactAttributeSet(object):
238
+ """
239
+ A set of artifact attributes.
240
+
241
+ An attribute set is a collection of attributes. Each attribute is
242
+ accessed using the attribute name as an attribute of the set. For
243
+ example, to access an attribute named 'version' in an attribute set
244
+ named 'strings', you would write:
245
+
246
+ .. code-block:: python
247
+
248
+ artifact.strings.version = "1.0"
249
+
250
+ """
251
+
252
+ def __init__(self):
253
+ super(ArtifactAttributeSet, self).__setattr__("_attributes", {})
254
+
255
+ def _get_attributes(self):
256
+ return self._attributes
257
+
258
+ def __getattr__(self, name) -> ArtifactAttribute:
259
+ """
260
+ Get or create an attribute by name.
261
+
262
+ Args:
263
+ name (str): The name of the attribute.
264
+
265
+ Returns:
266
+ An attribute object.
267
+ """
268
+ attributes = self._get_attributes()
269
+ if name not in attributes:
270
+ attributes[name] = self.create(name)
271
+ return attributes[name]
272
+
273
+ def __setattr__(self, name, value):
274
+ """
275
+ Set an attribute by name.
276
+
277
+ Args:
278
+ name (str): The name of the attribute.
279
+ value: The value to set.
280
+ """
281
+ attributes = self._get_attributes()
282
+ if name not in attributes:
283
+ attributes[name] = self.create(name)
284
+ attributes[name].set_value(value)
285
+ return attributes[name]
286
+
287
+ def __dict__(self):
288
+ """ Get a dictionary representation of the attribute set. """
289
+ return {key: str(value) for key, value in self.items()}
290
+
291
+ def __iter__(self):
292
+ """ Iterate over the attribute set. """
293
+ return iter(self.items())
294
+
295
+ def get(self, name, default=None):
296
+ """ Get an attribute by name.
297
+
298
+ Args:
299
+ name (str): The name of the attribute.
300
+
301
+ Returns:
302
+ The attribute object, or None if it does not exist.
303
+ """
304
+ attributes = self._get_attributes()
305
+ return attributes.get(name, default)
306
+
307
+ def items(self):
308
+ """ Get a list of tuples containing the attribute name and value. """
309
+ return self._get_attributes().items()
310
+
311
+ def apply(self, task, artifact):
312
+ """ Perform attribute actions when the artifact is being used. """
313
+ for _, value in self.items():
314
+ value.apply(task, artifact)
315
+
316
+ def unapply(self, task, artifact):
317
+ """ Undo attribute actions when the artifact is no longer being used. """
318
+ for _, value in self.items():
319
+ value.unapply(task, artifact)
320
+
321
+ def visit(self, task, artifact, visitor):
322
+ """ Visit all attributes in the set. """
323
+ for _, value in self.items():
324
+ value.visit(task, artifact, visitor)
325
+
326
+
327
+ class ArtifactAttributeSetProvider(object):
328
+ """ Base class for artifact attribute set providers.
329
+
330
+ An artifact attribute set provider is a factory for creating and managing
331
+ attribute sets in an artifact.
332
+ """
333
+
334
+ @staticmethod
335
+ def Register(cls):
336
+ """ Decorator for registering a provider class. """
337
+ ArtifactAttributeSetRegistry.providers.append(cls)
338
+
339
+ def create(self, artifact):
340
+ """ Create an attribute set for an artifact. """
341
+ raise NotImplementedError()
342
+
343
+ def parse(self, artifact, content):
344
+ """
345
+ Parse an attribute set from a dictionary.
346
+
347
+ The dictionary is loaded from a JSON file embedded in the artifact.
348
+ """
349
+ raise NotImplementedError()
350
+
351
+ def format(self, artifact, content):
352
+ """
353
+ Format an attribute set to a dictionary.
354
+
355
+ The dictionary is saved to a JSON file embedded in the artifact.
356
+ """
357
+ raise NotImplementedError()
358
+
359
+ def apply(self, task, artifact):
360
+ """ Perform actions when the artifact is being used. """
361
+ pass
362
+
363
+ def unapply(self, task, artifact):
364
+ """ Undo actions when the artifact is no longer being used. """
365
+ pass
366
+
367
+ def visit(self, task, artifact, visitor):
368
+ """ Visit all attributes in the set. """
369
+ pass
370
+
371
+
311
372
  @ArtifactAttributeSetProvider.Register
312
373
  class ArtifactFileAttributeProvider(ArtifactAttributeSetProvider):
374
+ """ Provider for the artifact 'files' attribute set. """
375
+
313
376
  def create(self, artifact):
314
377
  setattr(artifact, "files", ArtifactFileAttribute())
315
378
 
@@ -323,22 +386,19 @@ class ArtifactFileAttributeProvider(ArtifactAttributeSetProvider):
323
386
  def format(self, artifact, content):
324
387
  content["files"] = [{"src": src, "dst": dst} for src, dst in artifact.files.items()]
325
388
 
326
- def apply(self, task, artifact):
327
- pass
328
-
329
- def unapply(self, task, artifact):
330
- pass
331
389
 
332
- def visit(self, task, artifact, visitor):
333
- pass
390
+ def visit_artifact(task, artifact, visitor):
391
+ ArtifactAttributeSetRegistry.visit_all(task, artifact, visitor)
334
392
 
335
393
 
336
394
  def json_serializer(obj):
395
+ """ JSON serializer for datetime objects. """
337
396
  if isinstance(obj, datetime):
338
397
  return dict(type="datetime", value=obj.strftime("%Y-%m-%d %H:%M:%S.%f"))
339
398
 
340
399
 
341
400
  def json_deserializer(dct):
401
+ """ JSON deserializer for datetime objects. """
342
402
  if dct.get("type") == "datetime":
343
403
  return datetime.strptime(dct["value"], "%Y-%m-%d %H:%M:%S.%f")
344
404
  return dct
@@ -480,6 +540,8 @@ class Artifact(object):
480
540
  self._identity = name + "@" + self._identity
481
541
  self._main = name == "main"
482
542
  self._name = name or "main"
543
+ self._full_name = f"{self._name}@{node.short_qualified_name}" if node else self._name
544
+ self._log_name = f"{self._full_name} {node.identity[:8]}" if node else self._full_name
483
545
  self._node = node
484
546
  self._session = session
485
547
  self._task = node.task if node else None
@@ -491,6 +553,18 @@ class Artifact(object):
491
553
  ArtifactAttributeSetRegistry.create_all(self)
492
554
  self.reload()
493
555
 
556
+ def _info(self, fmt, *args, **kwargs):
557
+ log.info(fmt + f" ({self._log_name})", *args, **kwargs)
558
+
559
+ def _debug(self, fmt, *args, **kwargs):
560
+ log.debug(fmt + f" ({self._log_name})", *args, **kwargs)
561
+
562
+ def _warning(self, fmt, *args, **kwargs):
563
+ log.warning(fmt + f" ({self._log_name})", *args, **kwargs)
564
+
565
+ def _error(self, fmt, *args, **kwargs):
566
+ log.error(fmt + f" ({self._log_name})", *args, **kwargs)
567
+
494
568
  def __enter__(self):
495
569
  return self
496
570
 
@@ -500,7 +574,7 @@ class Artifact(object):
500
574
  def __getattr__(self, name):
501
575
  raise_task_error(self._node, "Attempt to access invalid artifact attribute '{0}'", name)
502
576
 
503
- def _write_manifest(self):
577
+ def _write_manifest(self, temporary=False):
504
578
  content = {}
505
579
  content["size"] = self._get_size()
506
580
  content["unpacked"] = self._unpacked
@@ -525,13 +599,19 @@ class Artifact(object):
525
599
 
526
600
  ArtifactAttributeSetRegistry.format_all(self, content)
527
601
 
528
- manifest = fs.path.join(self.path, ".manifest.json")
602
+ if temporary:
603
+ manifest = fs.path.join(self.temporary_path, ".manifest.json")
604
+ else:
605
+ manifest = fs.path.join(self.final_path, ".manifest.json")
529
606
  with open(manifest, "wb") as f:
530
607
  f.write(json.dumps(content, indent=2, default=json_serializer).encode())
531
608
 
532
- def _read_manifest(self):
609
+ def _read_manifest(self, temporary=False):
533
610
  try:
534
- manifest_path = fs.path.join(self.path, ".manifest.json")
611
+ if temporary:
612
+ manifest_path = fs.path.join(self.temporary_path, ".manifest.json")
613
+ else:
614
+ manifest_path = fs.path.join(self.final_path, ".manifest.json")
535
615
  with open(manifest_path) as manifest_file:
536
616
  content = json.load(manifest_file, object_hook=json_deserializer)
537
617
  self._valid = True
@@ -611,6 +691,17 @@ class Artifact(object):
611
691
  self._read_manifest()
612
692
  self._temporary = not self._valid
613
693
 
694
+ def reset(self):
695
+ self._unpacked = False
696
+ self._uploadable = True
697
+ self._created = datetime.now()
698
+ self._modified = datetime.now()
699
+ self._expires = self._task.expires if not self._session else expires.Immediately()
700
+ self._size = 0
701
+ self._influence = None
702
+ self._valid = False
703
+ self._temporary = True
704
+
614
705
  @property
615
706
  def name(self):
616
707
  """ str: The name of the artifact. Default: 'main'. """
@@ -670,7 +761,7 @@ class Artifact(object):
670
761
  raise_task_error_if(
671
762
  not self.is_temporary(),
672
763
  self._node,
673
- "Can't collect files into an already published task artifact")
764
+ "Can't collect files into an already published task artifact ({})", self._log_name)
674
765
 
675
766
  files = self.tools.expand_path(files)
676
767
  files = self.tools.glob(files)
@@ -741,7 +832,7 @@ class Artifact(object):
741
832
  raise_task_error_if(
742
833
  self.is_temporary(),
743
834
  self._node,
744
- "Can't copy files from an unpublished task artifact")
835
+ "Can't copy files from an unpublished task artifact ({})", self._log_name)
745
836
 
746
837
  files = fs.path.join(self._path, files)
747
838
  files = self.tools.expand_path(files)
@@ -807,13 +898,15 @@ class Artifact(object):
807
898
  def get_node(self):
808
899
  return self._node
809
900
 
810
- def is_temporary(self):
901
+ def is_temporary(self) -> bool:
811
902
  return self._temporary
812
903
 
813
- def is_unpackable(self):
904
+ def is_unpackable(self) -> bool:
814
905
  if not self._node:
815
906
  return True
816
- return self._node.is_unpackable()
907
+ if self.name == "main":
908
+ return self._task.unpack.__func__ is not tasks.Task.unpack
909
+ return getattr(self._task, "unpack_" + self.name, tasks.Task.unpack) is not tasks.Task.unpack
817
910
 
818
911
  def is_unpacked(self):
819
912
  return self._unpacked
@@ -824,6 +917,8 @@ class Artifact(object):
824
917
  def is_cacheable(self):
825
918
  if not self._node:
826
919
  return True
920
+ if self.is_session():
921
+ return True
827
922
  return self.task.is_cacheable()
828
923
 
829
924
  @property
@@ -838,6 +933,38 @@ class Artifact(object):
838
933
  return self._node.task
839
934
 
840
935
 
936
+ class ArtifactToolsProxy(object):
937
+ """
938
+ An artifact proxy that uses a specific tools object.
939
+
940
+ Used when artifacts are consumed by tasks. The proxy allows the
941
+ task to access the artifact's methods and attributes using the
942
+ task's own tools object. This is useful when the consumer task
943
+ wishes to copy files, read files, etc, using the current working
944
+ directory and environment of the task.
945
+ """
946
+
947
+ def __init__(self, artifact, tools):
948
+ self._artifact = artifact
949
+ self._tools = tools
950
+
951
+ def __getattr__(self, name):
952
+ if name == "tools":
953
+ return self._tools
954
+ if name == "_artifact":
955
+ return self._artifact
956
+ attr = getattr(self._artifact.__class__, name, None)
957
+ if attr is not None and callable(attr):
958
+ return attr.__get__(self, ArtifactToolsProxy)
959
+ return getattr(self._artifact, name)
960
+
961
+ def __setattr__(self, name, value):
962
+ if name == "_artifact" or name == "_tools":
963
+ super(ArtifactToolsProxy, self).__setattr__(name, value)
964
+ else:
965
+ setattr(self._artifact, name, value)
966
+
967
+
841
968
  class Context(object):
842
969
  """
843
970
  Execution context and dependency wrapper.
@@ -863,18 +990,14 @@ class Context(object):
863
990
  for dep in reversed(self._node.children):
864
991
  for artifact in dep.artifacts:
865
992
  # Create clone with tools from this task
866
- artifact = self._cache.get_artifact(
867
- dep,
868
- name=artifact.name,
869
- session=artifact.is_session(),
870
- tools=self._node.tools,
871
- )
993
+ artifact = ArtifactToolsProxy(artifact, self._node.tools)
872
994
 
873
995
  # Don't include session artifacts that don't exist,
874
- # i.e. were no build has taken place due to presence
996
+ # i.e. where no build has taken place due to presence
875
997
  # of the persistent artifacts.
876
- if artifact.is_session() and not self._cache.is_available_locally(artifact):
877
- continue
998
+ if not dep.is_resource():
999
+ if artifact.is_session() and not self._cache.is_available_locally(artifact):
1000
+ continue
878
1001
 
879
1002
  self._cache.unpack(artifact)
880
1003
 
@@ -886,10 +1009,8 @@ class Context(object):
886
1009
  self._artifacts_index[artifact.name + "@" + dep.short_qualified_name] = artifact
887
1010
  artifact.apply()
888
1011
  ArtifactAttributeSetRegistry.apply_all(self._node.task, artifact)
889
- ArtifactAttributeSetRegistry.apply_all_deps(self._node.task, self)
890
- except Exception as e:
1012
+ except (Exception, KeyboardInterrupt) as e:
891
1013
  # Rollback all attributes/resources except the last failing one
892
- ArtifactAttributeSetRegistry.unapply_all_deps(self._node.task, self)
893
1014
  for name, artifact in reversed(list(self._artifacts.items())[:-1]):
894
1015
  with utils.ignore_exception():
895
1016
  ArtifactAttributeSetRegistry.unapply_all(self._node.task, artifact)
@@ -898,7 +1019,6 @@ class Context(object):
898
1019
  return self
899
1020
 
900
1021
  def __exit__(self, type, value, tb):
901
- ArtifactAttributeSetRegistry.unapply_all_deps(self._node.task, self)
902
1022
  for name, artifact in reversed(self._artifacts.items()):
903
1023
  ArtifactAttributeSetRegistry.unapply_all(self._node.task, artifact)
904
1024
  artifact.unapply()
@@ -925,7 +1045,7 @@ class Context(object):
925
1045
  key = self._node.tools.expand(key)
926
1046
 
927
1047
  alias, artifact, task, params = utils.parse_aliased_task_name(key)
928
- raise_task_error_if(alias, self._node, "Cannot define alias when indexing dependencies")
1048
+ raise_task_error_if(alias, self._node, "Cannot define alias when indexing dependencies: {}", alias)
929
1049
  task_name = utils.format_task_name(task, params)
930
1050
  task_artifact_name = utils.format_task_name(task, params, artifact)
931
1051
 
@@ -960,11 +1080,148 @@ class Context(object):
960
1080
 
961
1081
 
962
1082
  class PidProvider(object):
963
- def __init__(self):
964
- self._uuid = uuid.uuid4()
965
-
966
1083
  def __call__(self):
967
- return str(self._uuid)
1084
+ pid = str(uuid.uuid4())
1085
+ log.debug("New cache lock file: {0}", pid)
1086
+ return pid
1087
+
1088
+
1089
+ class StorageProvider(object):
1090
+ """
1091
+ Base class for remote artifact storage providers.
1092
+
1093
+ A storage provider is responsible for uploading and downloading
1094
+ artifacts to and from a remote storage location. The storage
1095
+ location can be a file system path, a cloud storage service, or
1096
+ any other type of storage.
1097
+
1098
+ """
1099
+
1100
+ def download(self, artifact: Artifact, force: bool = False) -> bool:
1101
+ """
1102
+ Download an artifact from the storage location.
1103
+
1104
+ The artifact should be downloaded to the path returned by the artifact's
1105
+ :func:`~jolt.Artifact.get_archive_path` method. The downloaded artifact
1106
+ must be in the format specified by DEFAULT_ARCHIVE_TYPE.
1107
+
1108
+ The download should be retried if it fails due to network issues.
1109
+ The method may raise an exception on errors.
1110
+
1111
+ Args:
1112
+ artifact (Artifact): The artifact to download.
1113
+ force (bool, optional): If True, the download should be forced,
1114
+ even if the artifact is already present locally, or if the
1115
+ download is disabled. The default is False.
1116
+
1117
+ Returns:
1118
+ bool: True if the download was successful, False otherwise.
1119
+
1120
+ """
1121
+ return False
1122
+
1123
+ def download_enabled(self) -> bool:
1124
+ """ Return True if downloading is enabled. Default is True. """
1125
+ return True
1126
+
1127
+ def upload(self, artifact: Artifact, force: bool = False) -> bool:
1128
+ """
1129
+ Upload an artifact to the storage location.
1130
+
1131
+ The artifact to be uploaded is located at the path returned by
1132
+ the artifact's :func:`~jolt.Artifact.get_archive_path` method. The
1133
+ uploaded artifact is in the format specified by DEFAULT_ARCHIVE_TYPE.
1134
+ The provider may choose to upload the artifact using a different
1135
+ format, but it must be able to download the artifact in the
1136
+ DEFAULT_ARCHIVE_TYPE format.
1137
+
1138
+ The upload should be retried if it fails due to network issues.
1139
+ The method may raise an exception on errors.
1140
+
1141
+ Args:
1142
+ artifact (Artifact): The artifact to upload.
1143
+ force (bool, optional): If True, the upload should be forced,
1144
+ even if the artifact is already present remotely, or if the
1145
+ upload is disabled. The default is False.
1146
+
1147
+ Returns:
1148
+ bool: True if the upload was successful, False otherwise.
1149
+
1150
+ """
1151
+ return False
1152
+
1153
+ def upload_enabled(self) -> bool:
1154
+ """ Return True if uploading is enabled. Default is True. """
1155
+ return True
1156
+
1157
+ def location(self, artifact) -> str:
1158
+ """
1159
+ Return the URL of the artifact in the storage location.
1160
+
1161
+ This method is sometimes used to identify if an artifact is
1162
+ present in the storage location. The URL should point to the
1163
+ artifact if present, or an empty string if the artifact is
1164
+ absent.
1165
+
1166
+ Args:
1167
+ artifact (Artifact): The artifact to locate.
1168
+ """
1169
+ return '' # URL
1170
+
1171
+ def availability(self, artifacts: list) -> tuple:
1172
+ """
1173
+ Check the availability of a list of artifacts.
1174
+
1175
+ This method is used to determine which artifacts are present in the
1176
+ storage location. The method should return a tuple of two lists:
1177
+ the first list contains the artifacts that are present, and the
1178
+ second list contains the artifacts that are missing.
1179
+
1180
+ The default implementation of this method calls the :func:`~jolt.StorageProvider.location`
1181
+ method for each artifact in the list. Subclasses may override this
1182
+ method to provide a more efficient implementation.
1183
+
1184
+ Args:
1185
+ artifacts (list): A list of artifacts to check.
1186
+
1187
+ Returns:
1188
+ tuple: A tuple of two lists: the first list contains the artifacts
1189
+ that are present, and the second list contains the artifacts
1190
+ that are missing.
1191
+
1192
+ """
1193
+ # Ensure artifacts is a list
1194
+ artifacts = utils.as_list(artifacts)
1195
+
1196
+ present = set()
1197
+ missing = set()
1198
+
1199
+ for artifact in artifacts:
1200
+ if self.location(artifact):
1201
+ present.add(artifact)
1202
+ else:
1203
+ missing.add(artifact)
1204
+
1205
+ return list(present), list(missing)
1206
+
1207
+
1208
+ class StorageProviderFactory(StorageProvider):
1209
+ """ A factory for storage providers. """
1210
+
1211
+ def create(self) -> StorageProvider:
1212
+ """
1213
+ Create a new storage provider.
1214
+
1215
+ This method should return a new instance of a storage provider,
1216
+ which must be a subclass of :class:`~jolt.StorageProvider`.
1217
+
1218
+ """
1219
+ pass
1220
+
1221
+
1222
+ def RegisterStorage(cls):
1223
+ """ Decorator used to register a storage provider factory. """
1224
+ ArtifactCache.storage_provider_factories.append(cls)
968
1225
 
969
1226
 
970
1227
  @utils.Singleton
@@ -1032,13 +1289,12 @@ class ArtifactCache(StorageProvider):
1032
1289
 
1033
1290
  # If no storage provider supports the availability method,
1034
1291
  # we will not use only the local presence cache.
1035
- self._local_presence_cache = set()
1036
1292
  self._remote_presence_cache = set()
1037
1293
  self._presence_cache_only = self.has_availability()
1038
1294
 
1039
1295
  # Read configuration
1040
1296
  self._max_size = config.getsize(
1041
- "jolt", "cachesize", os.environ.get("JOLT_CACHESIZE", 1 * 1024 ** 3))
1297
+ "jolt", "cachesize", os.environ.get("JOLT_CACHE_SIZE", 1 * 1024 ** 3))
1042
1298
 
1043
1299
  # Create cache directory
1044
1300
  self._fs_create_cachedir()
@@ -1047,10 +1303,12 @@ class ArtifactCache(StorageProvider):
1047
1303
  self._cache_locked = False
1048
1304
  self._lock_file = fasteners.InterProcessLock(self._fs_get_lock_file())
1049
1305
  self._thread_lock = RLock()
1306
+ self._artifact_thread_lock = utils.IdLock()
1050
1307
 
1051
1308
  # Create process lock file
1052
1309
  with self._cache_lock():
1053
- self._pid = pidprovider() if pidprovider else PidProvider()()
1310
+ self._pid_provider = pidprovider or PidProvider()
1311
+ self._pid = self._pid_provider()
1054
1312
  self._pid_file = fasteners.InterProcessLock(self._fs_get_pid_file(self._pid))
1055
1313
  self._pid_file.acquire()
1056
1314
 
@@ -1082,6 +1340,7 @@ class ArtifactCache(StorageProvider):
1082
1340
  db = sqlite3.connect(self._db_path, detect_types=sqlite3.PARSE_DECLTYPES)
1083
1341
  try:
1084
1342
  db.execute("PRAGMA journal_mode=OFF")
1343
+ # db.set_trace_callback(log.warning)
1085
1344
  yield db
1086
1345
  finally:
1087
1346
  db.close()
@@ -1279,14 +1538,16 @@ class ArtifactCache(StorageProvider):
1279
1538
  def _fs_get_artifact(self, node, name, tools=None, session=False):
1280
1539
  return Artifact(self, node, name=name, tools=tools, session=session)
1281
1540
 
1282
- def _fs_commit_artifact(self, artifact, uploadable):
1541
+ def _fs_commit_artifact(self, artifact: Artifact, uploadable: bool, temporary: bool):
1283
1542
  artifact._set_uploadable(uploadable)
1284
1543
  if not artifact.is_unpackable():
1285
1544
  artifact._set_unpacked()
1286
- artifact._write_manifest()
1287
- if artifact.is_temporary():
1545
+ if temporary:
1546
+ artifact._write_manifest(temporary=True)
1288
1547
  fs.rmtree(artifact.final_path, ignore_errors=True)
1289
- fs.rename(artifact.path, artifact.final_path)
1548
+ fs.rename(artifact.temporary_path, artifact.final_path)
1549
+ else:
1550
+ artifact._write_manifest(temporary=False)
1290
1551
 
1291
1552
  @contextlib.contextmanager
1292
1553
  def _fs_compress_artifact(self, artifact):
@@ -1295,14 +1556,14 @@ class ArtifactCache(StorageProvider):
1295
1556
 
1296
1557
  raise_task_error_if(
1297
1558
  artifact.is_temporary(), task,
1298
- "Can't compress an unpublished task artifact")
1559
+ "Can't compress an unpublished task artifact ({})", artifact._log_name)
1299
1560
 
1300
1561
  try:
1301
1562
  artifact.tools.archive(artifact.path, archive)
1302
1563
  except KeyboardInterrupt as e:
1303
1564
  raise e
1304
1565
  except Exception:
1305
- raise_task_error(task, "Failed to compress task artifact")
1566
+ raise_task_error(task, "Failed to compress task artifact ({})", artifact._log_name)
1306
1567
  try:
1307
1568
  yield
1308
1569
  finally:
@@ -1313,29 +1574,57 @@ class ArtifactCache(StorageProvider):
1313
1574
  archive = artifact.get_archive_path()
1314
1575
  try:
1315
1576
  task.tools.extract(archive, artifact.temporary_path, ignore_owner=True)
1577
+ artifact._read_manifest(temporary=True)
1316
1578
  except KeyboardInterrupt as e:
1579
+ fs.rmtree(artifact.temporary_path, ignore_errors=True)
1317
1580
  raise e
1318
1581
  except Exception:
1319
- raise_task_error(task, "Failed to extract task artifact archive")
1582
+ fs.rmtree(artifact.temporary_path, ignore_errors=True)
1583
+ raise_task_error(task, "Failed to extract task artifact archive ({})", artifact._log_name)
1320
1584
  finally:
1321
1585
  fs.unlink(archive, ignore_errors=True)
1322
- artifact._read_manifest()
1323
1586
 
1324
1587
  def _fs_delete_artifact(self, identity, task_name, onerror=None):
1325
1588
  fs.rmtree(self._fs_get_artifact_path(identity, task_name), ignore_errors=True, onerror=onerror)
1326
1589
  fs.rmtree(self._fs_get_artifact_tmppath(identity, task_name), ignore_errors=True, onerror=onerror)
1590
+ fs.rmtree(self._fs_get_artifact_path_legacy(identity, task_name), ignore_errors=True, onerror=onerror)
1591
+ fs.rmtree(self._fs_get_artifact_tmppath_legacy(identity, task_name), ignore_errors=True, onerror=onerror)
1327
1592
  fs.unlink(fs.path.join(self.root, task_name), ignore_errors=True)
1328
1593
 
1594
+ def _fs_identity(self, identity):
1595
+ parts = identity.split("@", 1)
1596
+ if len(parts) <= 1:
1597
+ parts = ["main"] + parts
1598
+ return parts[1] + "-" + utils.canonical(parts[0])
1599
+
1600
+ def _fs_identity_legacy(self, identity):
1601
+ parts = identity.split("@", 1)
1602
+ if len(parts) <= 1:
1603
+ parts = ["main"] + parts
1604
+ return parts[0] + "@" + utils.canonical(parts[1])
1605
+
1329
1606
  def _fs_get_artifact_archivepath(self, identity, task_name):
1330
- return fs.get_archive(fs.path.join(self.root, task_name, identity))
1607
+ identity = self._fs_identity(identity)
1608
+ return fs.path.join(self.root, task_name, identity) + DEFAULT_ARCHIVE_TYPE
1331
1609
 
1332
1610
  def _fs_get_artifact_lockpath(self, identity):
1611
+ identity = self._fs_identity(identity)
1333
1612
  return fs.path.join(self.root, "locks", identity + ".lock")
1334
1613
 
1335
1614
  def _fs_get_artifact_tmppath(self, identity, task_name):
1615
+ identity = self._fs_identity(identity)
1336
1616
  return fs.path.join(self.root, task_name, "." + identity)
1337
1617
 
1338
1618
  def _fs_get_artifact_path(self, identity, task_name):
1619
+ identity = self._fs_identity(identity)
1620
+ return fs.path.join(self.root, task_name, identity)
1621
+
1622
+ def _fs_get_artifact_tmppath_legacy(self, identity, task_name):
1623
+ identity = self._fs_identity_legacy(identity)
1624
+ return fs.path.join(self.root, task_name, "." + identity)
1625
+
1626
+ def _fs_get_artifact_path_legacy(self, identity, task_name):
1627
+ identity = self._fs_identity_legacy(identity)
1339
1628
  return fs.path.join(self.root, task_name, identity)
1340
1629
 
1341
1630
  def _fs_get_artifact_manifest_path(self, identity, task_name):
@@ -1409,16 +1698,36 @@ class ArtifactCache(StorageProvider):
1409
1698
  evicted = 0
1410
1699
  for identity, task_name, _, used in artifacts:
1411
1700
  if not if_expired or self._fs_is_artifact_expired(identity, task_name, used):
1412
- self._db_delete_artifact(db, identity)
1413
- self._fs_delete_artifact(identity, task_name, onerror=onerror)
1414
- evicted += 1
1415
- log.debug("Evicted {}: {}", identity, task_name)
1701
+ with utils.delayed_interrupt():
1702
+ self._db_delete_artifact(db, identity)
1703
+ self._fs_delete_artifact(identity, task_name, onerror=onerror)
1704
+ evicted += 1
1705
+ log.debug("Evicted {}: {}", identity, task_name)
1416
1706
  return evicted == len(artifacts)
1417
1707
 
1418
1708
  ############################################################################
1419
1709
  # Public API
1420
1710
  ############################################################################
1421
1711
 
1712
+ def release(self):
1713
+ """
1714
+ Release references to artifacts held by the current process.
1715
+
1716
+ Effectively, a new pid lock file is created and the old one is deleted. This
1717
+ allows other processes to detect termination of the current process and
1718
+ garbage collect any references owned by the process.
1719
+ """
1720
+ with self._cache_lock(), self._db() as db:
1721
+ self._db_invalidate_locks(db, try_all=True)
1722
+ self._db_invalidate_references(db, try_all=True)
1723
+ self._fs_invalidate_pids(db, try_all=True)
1724
+ self._pid_file.release()
1725
+
1726
+ self._pid = self._pid_provider()
1727
+ self._pid_file = fasteners.InterProcessLock(self._fs_get_pid_file(self._pid))
1728
+ self._pid_file.acquire()
1729
+
1730
+ @utils.delay_interrupt
1422
1731
  def is_available_locally(self, artifact):
1423
1732
  """
1424
1733
  Check presence of task artifact in cache.
@@ -1430,20 +1739,13 @@ class ArtifactCache(StorageProvider):
1430
1739
  if not artifact.is_cacheable():
1431
1740
  return False
1432
1741
 
1433
- # Cache availability in node
1434
- try:
1435
- assert artifact.identity in self._local_presence_cache
1436
- except AssertionError:
1437
- pass
1438
-
1439
1742
  with self._cache_lock(), self._db() as db:
1440
1743
  if self._db_select_artifact(db, artifact.identity) or self._db_select_reference(db, artifact.identity):
1441
1744
  artifact.reload()
1442
1745
  if artifact.is_temporary():
1443
- self._db_delete_artifact(db, artifact.identity)
1746
+ self._db_delete_artifact(db, artifact.identity, and_refs=False)
1444
1747
  return False
1445
1748
  self._db_insert_reference(db, artifact.identity)
1446
- self._local_presence_cache.add(artifact.identity)
1447
1749
  return True
1448
1750
  return False
1449
1751
 
@@ -1471,7 +1773,7 @@ class ArtifactCache(StorageProvider):
1471
1773
  # Returns true if all storage providers implement the availability method
1472
1774
  return all([provider.availability.__func__ != StorageProvider.availability for provider in self._storage_providers])
1473
1775
 
1474
- def availability(self, artifacts):
1776
+ def availability(self, artifacts, remote=True):
1475
1777
  """ Check presence of task artifacts in any cache, local or remote """
1476
1778
  present = set()
1477
1779
  missing = set()
@@ -1486,6 +1788,9 @@ class ArtifactCache(StorageProvider):
1486
1788
  else:
1487
1789
  missing.add(artifact)
1488
1790
 
1791
+ if not remote:
1792
+ return list(present), list(missing)
1793
+
1489
1794
  # Check presence of all artifacts in the remote caches
1490
1795
  missing_remotely = artifacts
1491
1796
 
@@ -1507,31 +1812,35 @@ class ArtifactCache(StorageProvider):
1507
1812
  return self._options.download and \
1508
1813
  any([provider.download_enabled() for provider in self._storage_providers])
1509
1814
 
1815
+ def download_session_enabled(self):
1816
+ return self._options.download_session and \
1817
+ any([provider.download_enabled() for provider in self._storage_providers])
1818
+
1510
1819
  def upload_enabled(self):
1511
1820
  return self._options.upload and \
1512
1821
  any([provider.upload_enabled() for provider in self._storage_providers])
1513
1822
 
1514
- def download_all(self, node, force=False):
1515
- pass
1516
-
1517
1823
  def download(self, artifact, force=False):
1518
1824
  """
1519
1825
  Downloads an artifact from a remote cache to the local cache.
1520
1826
 
1521
1827
  The artifact is interprocess locked during the operation.
1522
1828
  """
1523
- if not force and not self.download_enabled():
1524
- return False
1829
+ if not force:
1830
+ if not artifact.is_session() and not self.download_enabled():
1831
+ return False
1832
+ if artifact.is_session() and not self.download_session_enabled():
1833
+ return False
1525
1834
  if not artifact.is_cacheable():
1526
1835
  return False
1527
- with self.lock_artifact(artifact) as artifact:
1836
+ with self.lock_artifact(artifact, why="download") as artifact:
1528
1837
  if self.is_available_locally(artifact):
1529
- artifact.task.info("Download skipped, already in local cache ({name})")
1838
+ artifact._info("Download skipped, already in local cache")
1530
1839
  return True
1531
1840
  for provider in self._storage_providers:
1532
1841
  if provider.download(artifact, force):
1533
1842
  self._fs_decompress_artifact(artifact)
1534
- self.commit(artifact)
1843
+ self.commit(artifact, temporary=True)
1535
1844
  return True
1536
1845
  return len(self._storage_providers) == 0
1537
1846
 
@@ -1547,11 +1856,11 @@ class ArtifactCache(StorageProvider):
1547
1856
  return True
1548
1857
  raise_task_error_if(
1549
1858
  not self.is_available_locally(artifact), artifact.task,
1550
- "Can't upload task artifact, no artifact present in the local cache")
1551
- with self.lock_artifact(artifact) if locked else artifact as artifact:
1859
+ "Can't upload task artifact, no artifact present in the local cache ({})", artifact._log_name)
1860
+ with self.lock_artifact(artifact, why="upload") if locked else artifact as artifact:
1552
1861
  raise_task_error_if(
1553
1862
  not artifact.is_uploadable(), artifact.task,
1554
- "Artifact was modified locally by another process and can no longer be uploaded, try again")
1863
+ "Artifact was modified locally by another process and can no longer be uploaded, try again ({})", artifact._log_name)
1555
1864
  if self._storage_providers:
1556
1865
  with self._fs_compress_artifact(artifact):
1557
1866
  return all([provider.upload(artifact, force) for provider in self._storage_providers])
@@ -1578,18 +1887,24 @@ class ArtifactCache(StorageProvider):
1578
1887
  """
1579
1888
  if not artifact.is_unpackable():
1580
1889
  return True
1581
- with self._thread_lock, self.lock_artifact(artifact) as artifact:
1582
- if not self.is_available_locally(artifact):
1583
- raise_task_error(
1584
- artifact.task,
1585
- "Locked artifact is missing in cache (forcibly removed?)")
1890
+ with self._thread_lock, self.lock_artifact(artifact, why="unpack") as artifact:
1891
+ raise_task_error_if(
1892
+ not self.is_available_locally(artifact),
1893
+ artifact.task,
1894
+ "Locked artifact is missing in cache (forcibly removed?) ({})", artifact._log_name)
1895
+
1896
+ raise_task_error_if(
1897
+ artifact.is_temporary(),
1898
+ artifact.task,
1899
+ "Can't unpack an unpublished task artifact ({})", artifact._log_name)
1900
+
1586
1901
  if artifact.is_unpacked():
1587
1902
  return True
1588
1903
 
1589
1904
  # Keep a temporary copy of the artifact if the task
1590
1905
  # unpack() method fails. The copy is removed in
1591
1906
  # get_locked_artifact() if left unused.
1592
- fs.copy(artifact.path, artifact.temporary_path, symlinks=True)
1907
+ fs.copy(artifact.final_path, artifact.temporary_path, symlinks=True)
1593
1908
 
1594
1909
  task = artifact.task
1595
1910
  with tools.Tools(task) as t:
@@ -1597,23 +1912,32 @@ class ArtifactCache(StorageProvider):
1597
1912
  # Note: unpack() will run on the original
1598
1913
  # artifact, not in the temporary copy.
1599
1914
  if task.unpack.__func__ is not tasks.Task.unpack:
1600
- task.info("Unpack started {}", artifact.get_node().log_name)
1915
+ artifact._info("Unpack started")
1601
1916
  artifact._set_unpacked()
1602
- task.unpack(artifact, t)
1603
- self.commit(artifact, uploadable=False)
1917
+ if artifact.name == "main":
1918
+ task.unpack(artifact, t)
1919
+ else:
1920
+ unpack = getattr(task, "unpack_" + artifact.name, None)
1921
+ raise_task_error_if(
1922
+ unpack is None, task,
1923
+ "Artifact unpack method not found: unpack_{}", artifact.name)
1924
+ unpack(artifact, t)
1925
+
1926
+ self.commit(artifact, uploadable=False, temporary=False)
1604
1927
 
1605
1928
  except NotImplementedError:
1606
- self.commit(artifact)
1929
+ self.commit(artifact, temporary=False)
1607
1930
 
1608
- except Exception as e:
1931
+ except (Exception, KeyboardInterrupt) as e:
1609
1932
  # Restore the temporary copy
1610
- fs.rmtree(artifact.path, ignore_errors=True)
1611
- fs.rename(artifact.temporary_path, artifact.path)
1612
- artifact.task.error("Unpack failed {}", artifact.get_node().log_name)
1933
+ fs.rmtree(artifact.final_path, ignore_errors=True)
1934
+ fs.rename(artifact.temporary_path, artifact.final_path)
1935
+ artifact._error("Unpack failed")
1613
1936
  raise e
1614
1937
  return True
1615
1938
 
1616
- def commit(self, artifact, uploadable=True):
1939
+ @utils.delay_interrupt
1940
+ def commit(self, artifact, uploadable=True, temporary=True):
1617
1941
  """
1618
1942
  Commits a task artifact to the cache.
1619
1943
 
@@ -1629,7 +1953,7 @@ class ArtifactCache(StorageProvider):
1629
1953
  return
1630
1954
 
1631
1955
  with self._cache_lock(), self._db() as db:
1632
- self._fs_commit_artifact(artifact, uploadable)
1956
+ self._fs_commit_artifact(artifact, uploadable, temporary)
1633
1957
  with utils.ignore_exception(): # Possibly already exists in DB, e.g. unpacked
1634
1958
  self._db_insert_artifact(db, artifact.identity, artifact.task.canonical_name, artifact.get_size())
1635
1959
  self._db_update_artifact_size(db, artifact.identity, artifact.get_size())
@@ -1646,19 +1970,17 @@ class ArtifactCache(StorageProvider):
1646
1970
  if self._discard(db, [candidate], True):
1647
1971
  evict_size -= candidate[2]
1648
1972
 
1973
+ @utils.delay_interrupt
1649
1974
  def discard(self, artifact, if_expired=False, onerror=None):
1650
1975
  with self._cache_lock(), self._db() as db:
1651
1976
  self._db_invalidate_locks(db)
1652
1977
  self._db_invalidate_references(db)
1653
1978
  self._fs_invalidate_pids(db)
1654
- discarded = self._discard(
1979
+ return self._discard(
1655
1980
  db,
1656
1981
  self._db_select_artifact_not_in_use(db, artifact.identity),
1657
1982
  if_expired,
1658
1983
  onerror=onerror)
1659
- if discarded:
1660
- self._local_presence_cache.discard(artifact.identity)
1661
- return discarded
1662
1984
 
1663
1985
  def _discard_wait(self, artifact):
1664
1986
  """
@@ -1677,10 +1999,11 @@ class ArtifactCache(StorageProvider):
1677
1999
  artifacts = self._db_select_artifact(db, artifact.identity)
1678
2000
  self._db_delete_artifact(db, artifact.identity, and_refs=False)
1679
2001
  refpids = self._db_select_artifact_reference_pids(db, artifact.identity)
2002
+ refpids = list(filter(lambda pid: pid != self._pid, refpids))
1680
2003
  lockpids = self._db_select_artifact_lock_pids(db, artifact.identity)
1681
2004
 
1682
- if len(refpids) > 1:
1683
- artifact.task.info("Artifact is temporarily in use, forced discard on hold ({name})")
2005
+ if len(refpids) > 0:
2006
+ artifact._info("Artifact is temporarily in use, forced discard on hold")
1684
2007
  for pid in refpids:
1685
2008
  # Loop waiting for other processes to surrender the artifact
1686
2009
  while True:
@@ -1700,8 +2023,7 @@ class ArtifactCache(StorageProvider):
1700
2023
 
1701
2024
  with self._cache_lock(), self._db() as db:
1702
2025
  assert self._discard(db, artifacts, False), "Failed to discard artifact"
1703
- self._local_presence_cache.discard(artifact.identity)
1704
- artifact.reload()
2026
+ artifact.reset()
1705
2027
  return artifact
1706
2028
 
1707
2029
  def discard_all(self, if_expired=False, onerror=None):
@@ -1719,10 +2041,29 @@ class ArtifactCache(StorageProvider):
1719
2041
  return Context(self, node)
1720
2042
 
1721
2043
  def get_artifact(self, node, name, tools=None, session=False):
1722
- return self._fs_get_artifact(node, name=name, tools=tools, session=session)
2044
+ artifact = self._fs_get_artifact(node, name=name, tools=tools, session=session)
2045
+ if not artifact.is_temporary():
2046
+ with self._cache_lock(), self._db() as db:
2047
+ if not self._db_select_artifact(db, artifact.identity) and not self._db_select_reference(db, artifact.identity):
2048
+ log.verbose("Artifact not present in db, discarding archive ({} )", artifact.task.short_qualified_name, artifact.identity)
2049
+ fs.rmtree(artifact.final_path, ignore_errors=True)
2050
+ artifact.reload()
2051
+ return artifact
1723
2052
 
1724
2053
  @contextlib.contextmanager
1725
- def lock_artifact(self, artifact, discard=False):
2054
+ def lock_artifact(self, artifact: Artifact, discard: bool = False, why: str = "publish"):
2055
+ """
2056
+ Locks the task artifact, both with process thread locks and interprocess file locks.
2057
+ """
2058
+ try:
2059
+ self._artifact_thread_lock.acquire(artifact.identity)
2060
+ with self._lock_artifact_interprocess(artifact, discard=discard, why=why) as artifact:
2061
+ yield artifact
2062
+ finally:
2063
+ self._artifact_thread_lock.release(artifact.identity)
2064
+
2065
+ @contextlib.contextmanager
2066
+ def _lock_artifact_interprocess(self, artifact: Artifact, discard: bool = False, why: str = "publish"):
1726
2067
  """
1727
2068
  Locks the task artifact.
1728
2069
 
@@ -1738,21 +2079,24 @@ class ArtifactCache(StorageProvider):
1738
2079
  lock = fasteners.InterProcessLock(lock_path)
1739
2080
  is_locked = lock.acquire(blocking=False)
1740
2081
  if not is_locked:
1741
- artifact.task.info("Artifact is temporarily locked by another process ({name})")
2082
+ artifact._info("Artifact is temporarily locked by another process")
1742
2083
  lock.acquire()
1743
2084
 
2085
+ artifact._debug("Artifact locked for {}", why)
2086
+
1744
2087
  try:
1745
- artifact.reload()
1746
2088
  if discard:
1747
2089
  artifact = self._discard_wait(artifact)
2090
+ else:
2091
+ artifact.reload()
2092
+
1748
2093
  if artifact.is_temporary():
1749
2094
  fs.rmtree(artifact.temporary_path, ignore_errors=True)
1750
2095
  fs.makedirs(artifact.temporary_path)
1751
2096
 
1752
- with contextlib.ExitStack() as stack:
1753
- stack.enter_context(artifact)
1754
- yield artifact
2097
+ yield artifact
1755
2098
  finally:
2099
+ artifact._debug("Artifact unlocked for {}", why)
1756
2100
  fs.rmtree(artifact.temporary_path, ignore_errors=True)
1757
2101
  with self._cache_lock():
1758
2102
  with self._db() as db:
@@ -1762,9 +2106,9 @@ class ArtifactCache(StorageProvider):
1762
2106
  if self._db_select_lock_count(db, artifact.identity) == 0:
1763
2107
  fs.unlink(lock_path, ignore_errors=True)
1764
2108
 
1765
- def precheck(self, artifacts):
2109
+ def precheck(self, artifacts, remote=True):
1766
2110
  """ Precheck artifacts for availability and cache status. """
1767
2111
  if not self.has_availability():
1768
2112
  return
1769
- present, missing = self.availability(artifacts)
2113
+ present, missing = self.availability(artifacts, remote=remote)
1770
2114
  log.verbose("Cache: {}/{} artifacts present", len(present), len(artifacts))