jolt 0.9.76__py3-none-any.whl → 0.9.429__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jolt/__init__.py +88 -7
- jolt/__main__.py +9 -1
- jolt/bin/fstree-darwin-x86_64 +0 -0
- jolt/bin/fstree-linux-x86_64 +0 -0
- jolt/cache.py +839 -367
- jolt/chroot.py +156 -0
- jolt/cli.py +362 -143
- jolt/common_pb2.py +63 -0
- jolt/common_pb2_grpc.py +4 -0
- jolt/config.py +99 -42
- jolt/error.py +19 -4
- jolt/expires.py +2 -2
- jolt/filesystem.py +8 -6
- jolt/graph.py +705 -117
- jolt/hooks.py +63 -1
- jolt/influence.py +129 -6
- jolt/loader.py +369 -121
- jolt/log.py +225 -63
- jolt/manifest.py +28 -38
- jolt/options.py +35 -10
- jolt/pkgs/abseil.py +42 -0
- jolt/pkgs/asio.py +25 -0
- jolt/pkgs/autoconf.py +41 -0
- jolt/pkgs/automake.py +41 -0
- jolt/pkgs/b2.py +31 -0
- jolt/pkgs/boost.py +111 -0
- jolt/pkgs/boringssl.py +32 -0
- jolt/pkgs/busybox.py +39 -0
- jolt/pkgs/bzip2.py +43 -0
- jolt/pkgs/cares.py +29 -0
- jolt/pkgs/catch2.py +36 -0
- jolt/pkgs/cbindgen.py +17 -0
- jolt/pkgs/cista.py +19 -0
- jolt/pkgs/clang.py +44 -0
- jolt/pkgs/cli11.py +23 -0
- jolt/pkgs/cmake.py +48 -0
- jolt/pkgs/cpython.py +196 -0
- jolt/pkgs/crun.py +29 -0
- jolt/pkgs/curl.py +38 -0
- jolt/pkgs/dbus.py +18 -0
- jolt/pkgs/double_conversion.py +24 -0
- jolt/pkgs/fastfloat.py +21 -0
- jolt/pkgs/ffmpeg.py +28 -0
- jolt/pkgs/flatbuffers.py +29 -0
- jolt/pkgs/fmt.py +27 -0
- jolt/pkgs/fstree.py +20 -0
- jolt/pkgs/gflags.py +18 -0
- jolt/pkgs/glib.py +18 -0
- jolt/pkgs/glog.py +25 -0
- jolt/pkgs/glslang.py +21 -0
- jolt/pkgs/golang.py +16 -11
- jolt/pkgs/googlebenchmark.py +18 -0
- jolt/pkgs/googletest.py +46 -0
- jolt/pkgs/gperf.py +15 -0
- jolt/pkgs/grpc.py +73 -0
- jolt/pkgs/hdf5.py +19 -0
- jolt/pkgs/help2man.py +14 -0
- jolt/pkgs/inja.py +28 -0
- jolt/pkgs/jsoncpp.py +31 -0
- jolt/pkgs/libarchive.py +43 -0
- jolt/pkgs/libcap.py +44 -0
- jolt/pkgs/libdrm.py +44 -0
- jolt/pkgs/libedit.py +42 -0
- jolt/pkgs/libevent.py +31 -0
- jolt/pkgs/libexpat.py +27 -0
- jolt/pkgs/libfastjson.py +21 -0
- jolt/pkgs/libffi.py +16 -0
- jolt/pkgs/libglvnd.py +30 -0
- jolt/pkgs/libogg.py +28 -0
- jolt/pkgs/libpciaccess.py +18 -0
- jolt/pkgs/libseccomp.py +21 -0
- jolt/pkgs/libtirpc.py +24 -0
- jolt/pkgs/libtool.py +42 -0
- jolt/pkgs/libunwind.py +35 -0
- jolt/pkgs/libva.py +18 -0
- jolt/pkgs/libvorbis.py +33 -0
- jolt/pkgs/libxml2.py +35 -0
- jolt/pkgs/libxslt.py +17 -0
- jolt/pkgs/libyajl.py +16 -0
- jolt/pkgs/llvm.py +81 -0
- jolt/pkgs/lua.py +54 -0
- jolt/pkgs/lz4.py +26 -0
- jolt/pkgs/m4.py +14 -0
- jolt/pkgs/make.py +17 -0
- jolt/pkgs/mesa.py +81 -0
- jolt/pkgs/meson.py +17 -0
- jolt/pkgs/mstch.py +28 -0
- jolt/pkgs/mysql.py +60 -0
- jolt/pkgs/nasm.py +49 -0
- jolt/pkgs/ncurses.py +30 -0
- jolt/pkgs/ng_log.py +25 -0
- jolt/pkgs/ninja.py +45 -0
- jolt/pkgs/nlohmann_json.py +25 -0
- jolt/pkgs/nodejs.py +19 -11
- jolt/pkgs/opencv.py +24 -0
- jolt/pkgs/openjdk.py +26 -0
- jolt/pkgs/openssl.py +103 -0
- jolt/pkgs/paho.py +76 -0
- jolt/pkgs/patchelf.py +16 -0
- jolt/pkgs/perl.py +42 -0
- jolt/pkgs/pkgconfig.py +64 -0
- jolt/pkgs/poco.py +39 -0
- jolt/pkgs/protobuf.py +77 -0
- jolt/pkgs/pugixml.py +27 -0
- jolt/pkgs/python.py +19 -0
- jolt/pkgs/qt.py +35 -0
- jolt/pkgs/rapidjson.py +26 -0
- jolt/pkgs/rapidyaml.py +28 -0
- jolt/pkgs/re2.py +30 -0
- jolt/pkgs/re2c.py +17 -0
- jolt/pkgs/readline.py +15 -0
- jolt/pkgs/rust.py +41 -0
- jolt/pkgs/sdl.py +28 -0
- jolt/pkgs/simdjson.py +27 -0
- jolt/pkgs/soci.py +46 -0
- jolt/pkgs/spdlog.py +29 -0
- jolt/pkgs/spirv_llvm.py +21 -0
- jolt/pkgs/spirv_tools.py +24 -0
- jolt/pkgs/sqlite.py +83 -0
- jolt/pkgs/ssl.py +12 -0
- jolt/pkgs/texinfo.py +15 -0
- jolt/pkgs/tomlplusplus.py +22 -0
- jolt/pkgs/wayland.py +26 -0
- jolt/pkgs/x11.py +58 -0
- jolt/pkgs/xerces_c.py +20 -0
- jolt/pkgs/xorg.py +360 -0
- jolt/pkgs/xz.py +29 -0
- jolt/pkgs/yamlcpp.py +30 -0
- jolt/pkgs/zeromq.py +47 -0
- jolt/pkgs/zlib.py +69 -0
- jolt/pkgs/zstd.py +33 -0
- jolt/plugins/alias.py +3 -0
- jolt/plugins/allure.py +5 -2
- jolt/plugins/autotools.py +66 -0
- jolt/plugins/cache.py +133 -0
- jolt/plugins/cmake.py +74 -6
- jolt/plugins/conan.py +238 -0
- jolt/plugins/cxx.py +698 -0
- jolt/plugins/cxxinfo.py +7 -0
- jolt/plugins/dashboard.py +1 -1
- jolt/plugins/docker.py +91 -23
- jolt/plugins/email.py +5 -2
- jolt/plugins/email.xslt +144 -101
- jolt/plugins/environ.py +11 -0
- jolt/plugins/fetch.py +141 -0
- jolt/plugins/gdb.py +44 -21
- jolt/plugins/gerrit.py +1 -14
- jolt/plugins/git.py +316 -101
- jolt/plugins/googletest.py +522 -1
- jolt/plugins/http.py +36 -38
- jolt/plugins/libtool.py +63 -0
- jolt/plugins/linux.py +990 -0
- jolt/plugins/logstash.py +4 -4
- jolt/plugins/meson.py +61 -0
- jolt/plugins/ninja-compdb.py +107 -31
- jolt/plugins/ninja.py +929 -134
- jolt/plugins/paths.py +11 -1
- jolt/plugins/pkgconfig.py +219 -0
- jolt/plugins/podman.py +148 -91
- jolt/plugins/python.py +137 -0
- jolt/plugins/remote_execution/__init__.py +0 -0
- jolt/plugins/remote_execution/administration_pb2.py +46 -0
- jolt/plugins/remote_execution/administration_pb2_grpc.py +170 -0
- jolt/plugins/remote_execution/log_pb2.py +32 -0
- jolt/plugins/remote_execution/log_pb2_grpc.py +68 -0
- jolt/plugins/remote_execution/scheduler_pb2.py +41 -0
- jolt/plugins/remote_execution/scheduler_pb2_grpc.py +141 -0
- jolt/plugins/remote_execution/worker_pb2.py +38 -0
- jolt/plugins/remote_execution/worker_pb2_grpc.py +112 -0
- jolt/plugins/report.py +12 -2
- jolt/plugins/rust.py +25 -0
- jolt/plugins/scheduler.py +710 -0
- jolt/plugins/selfdeploy/setup.py +9 -4
- jolt/plugins/selfdeploy.py +138 -88
- jolt/plugins/strings.py +35 -22
- jolt/plugins/symlinks.py +26 -11
- jolt/plugins/telemetry.py +5 -2
- jolt/plugins/timeline.py +13 -3
- jolt/plugins/volume.py +46 -48
- jolt/scheduler.py +591 -191
- jolt/tasks.py +1783 -245
- jolt/templates/export.sh.template +12 -6
- jolt/templates/timeline.html.template +44 -47
- jolt/timer.py +22 -0
- jolt/tools.py +749 -302
- jolt/utils.py +245 -18
- jolt/version.py +1 -1
- jolt/version_utils.py +2 -2
- jolt/xmldom.py +12 -2
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/METADATA +98 -38
- jolt-0.9.429.dist-info/RECORD +207 -0
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/WHEEL +1 -1
- jolt/plugins/amqp.py +0 -834
- jolt/plugins/debian.py +0 -338
- jolt/plugins/ftp.py +0 -181
- jolt/plugins/ninja-cache.py +0 -64
- jolt/plugins/ninjacli.py +0 -271
- jolt/plugins/repo.py +0 -253
- jolt-0.9.76.dist-info/RECORD +0 -79
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/entry_points.txt +0 -0
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/top_level.txt +0 -0
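Most of the behavioral changes in this release are concentrated in jolt/cache.py, whose diff follows. For orientation, the new docstrings in that diff document the artifact attribute API (`artifact.strings.version = "1.0"`) and how consumers index dependency artifacts. The sketch below shows that usage from a task definition; it is a minimal, hypothetical example — the `Producer`/`Consumer` task names are invented, and the `Task` method signatures are taken from the jolt documentation rather than from this diff.

```python
from jolt import Task


class Producer(Task):
    def publish(self, artifact, tools):
        # Collect files into the artifact and attach a string attribute
        # via the ArtifactAttributeSet API described in the cache.py
        # docstrings below.
        artifact.collect("output/*.txt")
        artifact.strings.version = "1.0"


class Consumer(Task):
    requires = ["producer"]

    def run(self, deps, tools):
        # Dependency artifacts are indexed by task name; per the new
        # Context code below, named artifacts can also be addressed
        # as "<artifact>@<task>".
        producer = deps["producer"]
        print(producer.strings.version)
```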
jolt/cache.py
CHANGED
@@ -1,6 +1,6 @@
 import atexit
 import contextlib
-from collections import OrderedDict
+from collections import namedtuple, OrderedDict
 from datetime import datetime
 import fasteners
 import json
@@ -10,18 +10,20 @@ from threading import RLock
 import uuid
 
 from jolt import config
+from jolt import expires
 from jolt import filesystem as fs
 from jolt import influence
 from jolt import log
 from jolt import tools
 from jolt import utils
+from jolt import tasks
 from jolt.options import JoltOptions
 from jolt.error import raise_error, raise_error_if
 from jolt.error import raise_task_error, raise_task_error_if
 from jolt.expires import ArtifactEvictionStrategyRegister
 
 
-DEFAULT_ARCHIVE_TYPE = ".tar.
+DEFAULT_ARCHIVE_TYPE = ".tar.zst"
 
 
 def locked(func):
@@ -31,179 +33,122 @@ def locked(func):
     return _f
 
 
-class StorageProvider(object):
-    def download(self, node, force=False):
-        return False
-
-    def download_enabled(self):
-        return True
-
-    def upload(self, node, force=False):
-        return False
-
-    def upload_enabled(self):
-        return True
-
-    def location(self, node):
-        return '' # URL
-
-
-class StorageProviderFactory(StorageProvider):
-    def create(self):
-        pass
-
-
-def RegisterStorage(cls):
-    ArtifactCache.storage_provider_factories.append(cls)
-
-
-class ArtifactAttributeSet(object):
-    def __init__(self):
-        super(ArtifactAttributeSet, self).__setattr__("_attributes", {})
-
-    def _get_attributes(self):
-        return self._attributes
-
-    def __getattr__(self, name):
-        attributes = self._get_attributes()
-        if name not in attributes:
-            attributes[name] = self.create(name)
-        return attributes[name]
-
-    def __setattr__(self, name, value):
-        attributes = self._get_attributes()
-        if name not in attributes:
-            attributes[name] = self.create(name)
-        attributes[name].set_value(value)
-        return attributes[name]
-
-    def __dict__(self):
-        return {key: str(value) for key, value in self.items()}
-
-    def items(self):
-        return self._get_attributes().items()
-
-    def apply(self, task, artifact):
-        for _, value in self.items():
-            value.apply(task, artifact)
-
-    def apply_deps(self, task, deps):
-        pass
-
-    def unapply(self, task, artifact):
-        for _, value in self.items():
-            value.unapply(task, artifact)
-
-    def unapply_deps(self, task, deps):
-        pass
-
-    def visit(self, task, artifact, visitor):
-        for _, value in self.items():
-            value.visit(task, artifact, visitor)
-
-
 class ArtifactAttributeSetRegistry(object):
-    providers = []
+    """
+    Registry for providers of artifact attribute sets.
+    """
+
+    providers = []  # List of objects that implement ArtifactAttributeSetProvider
 
     @staticmethod
     def create_all(artifact):
+        """ Create all artifact attribute sets. """
         for provider in ArtifactAttributeSetRegistry.providers:
             provider().create(artifact)
 
     @staticmethod
     def parse_all(artifact, content):
+        """ Parse all artifact attribute sets. """
         for provider in ArtifactAttributeSetRegistry.providers:
             provider().parse(artifact, content)
 
     @staticmethod
     def format_all(artifact, content):
+        """ Format all artifact attribute sets. """
         for provider in ArtifactAttributeSetRegistry.providers:
             provider().format(artifact, content)
 
     @staticmethod
     def apply_all(task, artifact):
+        """ Apply all artifact attribute sets. """
         for provider in ArtifactAttributeSetRegistry.providers:
             provider().apply(task, artifact)
 
-    @staticmethod
-    def apply_all_deps(task, deps):
-        for provider in ArtifactAttributeSetRegistry.providers:
-            provider().apply_deps(task, deps)
-
     @staticmethod
     def unapply_all(task, artifact):
+        """ Unapply all artifact attribute sets. """
         for provider in ArtifactAttributeSetRegistry.providers:
             provider().unapply(task, artifact)
 
-    @staticmethod
-    def unapply_all_deps(task, deps):
-        for provider in ArtifactAttributeSetRegistry.providers:
-            provider().unapply_deps(task, deps)
-
     @staticmethod
     def visit_all(task, artifact, visitor):
+        """ Visit all artifact attribute sets. """
         for provider in ArtifactAttributeSetRegistry.providers:
             provider().visit(task, artifact, visitor)
 
 
-
-
-
-
-class ArtifactAttributeSetProvider(object):
-    @staticmethod
-    def Register(cls):
-        ArtifactAttributeSetRegistry.providers.append(cls)
-
-    def create(self, artifact):
-        raise NotImplementedError()
-
-    def parse(self, artifact, content):
-        raise NotImplementedError()
-
-    def format(self, artifact, content):
-        raise NotImplementedError()
-
-    def apply(self, task, artifact):
-        pass
-
-    def apply_deps(self, task, deps):
-        pass
-
-    def unapply(self, task, artifact):
-        pass
-
-    def unapply_deps(self, task, deps):
-        pass
+class ArtifactAttribute(object):
+    """
+    An artifact attribute.
 
-
-
+    An artifact attribute is a key-value pair that can be set and retrieved
+    from an artifact attribute set. Attributes are used to store metadata and other
+    information that is associated with an artifact. They communicate information
+    between tasks and store information that is used by tasks when they consume an artifact.
 
+    Artifact attributes can also perform actions when the artifact is consumed.
 
-
+    """
     def __init__(self, name):
         self._name = name
 
     def get_name(self):
+        """ Get the name of the attribute. """
         return self._name
 
     def set_value(self, value, expand=True):
+        """
+        Set the value of the attribute.
+
+        Must be implemented by subclasses.
+
+        Args:
+            value: The value to set.
+            expand: If True, the value is macro expanded using the tools.expand() method.
+        """
         raise NotImplementedError()
 
     def get_value(self):
+        """
+        Get the value of the attribute.
+
+        Must be implemented by subclasses.
+        """
         raise NotImplementedError()
 
     def apply(self, task, artifact):
+        """
+        Perform an action when the artifact is being used.
+
+        Args:
+            task (Task): The task that is using the artifact.
+            artifact (Artifact): The artifact that is being used.
+
+        """
         pass
 
     def unapply(self, task, artifact):
+        """
+        Undo an action when the artifact is no longer being used.
+
+        Args:
+            task (Task): The task that is no longer using the artifact.
+            artifact (Artifact): The artifact that is no longer being used.
+        """
         pass
 
-    def __str__(self):
+    def __str__(self) -> str:
+        """
+        Get a string representation of the attribute.
+
+        Must be implemented by subclasses.
+        """
         raise NotImplementedError()
 
 
 class ArtifactStringAttribute(ArtifactAttribute):
+    """ An artifact attribute that stores a string value. """
+
     def __init__(self, artifact, name):
         self._artifact = artifact
         self._name = name
@@ -213,50 +158,55 @@ class ArtifactStringAttribute(ArtifactAttribute):
         return self._name
 
     def set_value(self, value, expand=True):
-        self._value = self._artifact.
+        self._value = self._artifact.tools.expand(str(value)) if expand else str(value)
 
     def get_value(self):
         return self._value
 
-    def apply(self, task, artifact):
-        pass
-
-    def unapply(self, task, artifact):
-        pass
-
-    def __str__(self):
+    def __str__(self) -> str:
         return str(self._value)
 
 
 class ArtifactListAttribute(ArtifactAttribute):
+    """ An artifact attribute that stores a list of values. """
+
     def __init__(self, artifact, name):
         self._artifact = artifact
         self._name = name
         self._value = []
 
+    def __getitem__(self, key):
+        return self._value[key]
+
+    def __getslice__(self, i, j):
+        return self._value[i:j]
+
+    def __len__(self):
+        return len(self._value)
+
     def get_name(self):
         return self._name
 
     def set_value(self, value, expand=True):
-        if type(value)
+        if type(value) is str:
             value = value.split(":")
-        raise_error_if(type(value)
-        self._value = self._artifact.
+        raise_error_if(type(value) is not list, "Illegal value assigned to artifact list attribute")
+        self._value = self._artifact.tools.expand(value) if expand else value
 
     def get_value(self):
         return self._value
 
     def append(self, value):
-        if type(value)
-            self._value.extend(self._artifact.
+        if type(value) is list:
+            self._value.extend(self._artifact.tools.expand(value))
         else:
-            self._value.append(self._artifact.
+            self._value.append(self._artifact.tools.expand(value))
 
     def extend(self, value):
         raise_error_if(
-            type(value)
+            type(value) is not list,
             "Illegal type passed to {}.extend() - list expected".format(self._name))
-        self._value.extend(self._artifact.
+        self._value.extend(self._artifact.tools.expand(value))
 
     def items(self):
         return list(self._value)
@@ -264,23 +214,16 @@ class ArtifactListAttribute(ArtifactAttribute):
     def count(self):
         return len(self.items())
 
-    def apply(self, task, artifact):
-        pass
-
-    def unapply(self, task, artifact):
-        pass
+    def __str__(self) -> str:
+        return fs.pathsep.join(str(v) for v in self._value)
 
 
 class ArtifactFileAttribute(object):
+    """ An attribute that stores a list of source and destination path tuples for files collected into the artifact. """
+
     def __init__(self):
         self._files = []
 
-    def apply(self, task, artifact):
-        pass
-
-    def unapply(self, task, artifact):
-        pass
-
     def append(self, src, dst):
         self._files.append((fs.as_posix(src), fs.as_posix(dst)))
 
@@ -291,8 +234,145 @@ class ArtifactFileAttribute(object):
         return self._files
 
 
+class ArtifactAttributeSet(object):
+    """
+    A set of artifact attributes.
+
+    An attribute set is a collection of attributes. Each attribute is
+    accessed using the attribute name as an attribute of the set. For
+    example, to access an attribute named 'version' in an attribute set
+    named 'strings', you would write:
+
+    .. code-block:: python
+
+        artifact.strings.version = "1.0"
+
+    """
+
+    def __init__(self):
+        super(ArtifactAttributeSet, self).__setattr__("_attributes", {})
+
+    def _get_attributes(self):
+        return self._attributes
+
+    def __getattr__(self, name) -> ArtifactAttribute:
+        """
+        Get or create an attribute by name.
+
+        Args:
+            name (str): The name of the attribute.
+
+        Returns:
+            An attribute object.
+        """
+        attributes = self._get_attributes()
+        if name not in attributes:
+            attributes[name] = self.create(name)
+        return attributes[name]
+
+    def __setattr__(self, name, value):
+        """
+        Set an attribute by name.
+
+        Args:
+            name (str): The name of the attribute.
+            value: The value to set.
+        """
+        attributes = self._get_attributes()
+        if name not in attributes:
+            attributes[name] = self.create(name)
+        attributes[name].set_value(value)
+        return attributes[name]
+
+    def __dict__(self):
+        """ Get a dictionary representation of the attribute set. """
+        return {key: str(value) for key, value in self.items()}
+
+    def __iter__(self):
+        """ Iterate over the attribute set. """
+        return iter(self.items())
+
+    def get(self, name, default=None):
+        """ Get an attribute by name.
+
+        Args:
+            name (str): The name of the attribute.
+
+        Returns:
+            The attribute object, or None if it does not exist.
+        """
+        attributes = self._get_attributes()
+        return attributes.get(name, default)
+
+    def items(self):
+        """ Get a list of tuples containing the attribute name and value. """
+        return self._get_attributes().items()
+
+    def apply(self, task, artifact):
+        """ Perform attribute actions when the artifact is being used. """
+        for _, value in self.items():
+            value.apply(task, artifact)
+
+    def unapply(self, task, artifact):
+        """ Undo attribute actions when the artifact is no longer being used. """
+        for _, value in self.items():
+            value.unapply(task, artifact)
+
+    def visit(self, task, artifact, visitor):
+        """ Visit all attributes in the set. """
+        for _, value in self.items():
+            value.visit(task, artifact, visitor)
+
+
+class ArtifactAttributeSetProvider(object):
+    """ Base class for artifact attribute set providers.
+
+    An artifact attribute set provider is a factory for creating and managing
+    attribute sets in an artifact.
+    """
+
+    @staticmethod
+    def Register(cls):
+        """ Decorator for registering a provider class. """
+        ArtifactAttributeSetRegistry.providers.append(cls)
+
+    def create(self, artifact):
+        """ Create an attribute set for an artifact. """
+        raise NotImplementedError()
+
+    def parse(self, artifact, content):
+        """
+        Parse an attribute set from a dictionary.
+
+        The dictionary is loaded from a JSON file embedded in the artifact.
+        """
+        raise NotImplementedError()
+
+    def format(self, artifact, content):
+        """
+        Format an attribute set to a dictionary.
+
+        The dictionary is saved to a JSON file embedded in the artifact.
+        """
+        raise NotImplementedError()
+
+    def apply(self, task, artifact):
+        """ Perform actions when the artifact is being used. """
+        pass
+
+    def unapply(self, task, artifact):
+        """ Undo actions when the artifact is no longer being used. """
+        pass
+
+    def visit(self, task, artifact, visitor):
+        """ Visit all attributes in the set. """
+        pass
+
+
 @ArtifactAttributeSetProvider.Register
 class ArtifactFileAttributeProvider(ArtifactAttributeSetProvider):
+    """ Provider for the artifact 'files' attribute set. """
+
     def create(self, artifact):
         setattr(artifact, "files", ArtifactFileAttribute())
 
@@ -306,22 +386,19 @@ class ArtifactFileAttributeProvider(ArtifactAttributeSetProvider):
     def format(self, artifact, content):
         content["files"] = [{"src": src, "dst": dst} for src, dst in artifact.files.items()]
 
-    def apply(self, task, artifact):
-        pass
-
-    def unapply(self, task, artifact):
-        pass
 
-
-
+def visit_artifact(task, artifact, visitor):
+    ArtifactAttributeSetRegistry.visit_all(task, artifact, visitor)
 
 
 def json_serializer(obj):
+    """ JSON serializer for datetime objects. """
     if isinstance(obj, datetime):
         return dict(type="datetime", value=obj.strftime("%Y-%m-%d %H:%M:%S.%f"))
 
 
 def json_deserializer(dct):
+    """ JSON deserializer for datetime objects. """
     if dct.get("type") == "datetime":
         return datetime.strptime(dct["value"], "%Y-%m-%d %H:%M:%S.%f")
     return dct
@@ -453,26 +530,40 @@ class Artifact(object):
         artifact.strings.version = "1.2"
     """
 
-    def __init__(self, cache, node, tools=None):
+    def __init__(self, cache, node, name=None, identity=None, tools=None, session=False):
         self._cache = cache
+        if identity:
+            self._identity = identity
+        else:
+            self._identity = node.identity if not session else node.instance
+        if name:
+            self._identity = name + "@" + self._identity
+        self._main = name == "main"
+        self._name = name or "main"
+        self._full_name = f"{self._name}@{node.short_qualified_name}" if node else self._name
+        self._log_name = f"{self._full_name} {node.identity[:8]}" if node else self._full_name
         self._node = node
+        self._session = session
+        self._task = node.task if node else None
         self._tools = tools or self._node.tools
-        self._path = cache._fs_get_artifact_path(
-        self._temp = cache._fs_get_artifact_tmppath(
-        self._archive = cache._fs_get_artifact_archivepath(
-        self._lock_path = cache._fs_get_artifact_lockpath(
-        self._unpacked = False
-        self._uploadable = True
-        self._created = datetime.now()
-        self._modified = datetime.now()
-        self._expires = node.task.expires
-        self._size = 0
-        self._influence = None
+        self._path = cache._fs_get_artifact_path(self._identity, node.canonical_name if node else name)
+        self._temp = cache._fs_get_artifact_tmppath(self._identity, node.canonical_name if node else name)
+        self._archive = cache._fs_get_artifact_archivepath(self._identity, node.canonical_name if node else name)
+        self._lock_path = cache._fs_get_artifact_lockpath(self._identity)
         ArtifactAttributeSetRegistry.create_all(self)
-        self.
-
-
-
+        self.reload()
+
+    def _info(self, fmt, *args, **kwargs):
+        log.info(fmt + f" ({self._log_name})", *args, **kwargs)
+
+    def _debug(self, fmt, *args, **kwargs):
+        log.debug(fmt + f" ({self._log_name})", *args, **kwargs)
+
+    def _warning(self, fmt, *args, **kwargs):
+        log.warning(fmt + f" ({self._log_name})", *args, **kwargs)
+
+    def _error(self, fmt, *args, **kwargs):
+        log.error(fmt + f" ({self._log_name})", *args, **kwargs)
 
     def __enter__(self):
         return self
@@ -483,19 +574,23 @@ class Artifact(object):
     def __getattr__(self, name):
         raise_task_error(self._node, "Attempt to access invalid artifact attribute '{0}'", name)
 
-    def _write_manifest(self):
+    def _write_manifest(self, temporary=False):
         content = {}
-        content["task"] = self._node.name
         content["size"] = self._get_size()
         content["unpacked"] = self._unpacked
         content["uploadable"] = self._uploadable
-
-
-
+        if self._node:
+            content["task"] = self._node.name
+            content["identity"] = self._node.identity
+            content["requires"] = self._node.task.requires
+            content["parameters"] = self._node.task._get_parameters()
+
         if self._influence is not None:
             content["influence"] = self._influence
-
+        elif self._node:
             content["influence"] = influence.HashInfluenceRegistry.get().get_strings(self._node.task)
+        else:
+            content["influence"] = []
         content["created"] = self._created
         content["modified"] = datetime.now()
         content["expires"] = self._expires.value
@@ -504,13 +599,19 @@ class Artifact(object):
 
         ArtifactAttributeSetRegistry.format_all(self, content)
 
-
+        if temporary:
+            manifest = fs.path.join(self.temporary_path, ".manifest.json")
+        else:
+            manifest = fs.path.join(self.final_path, ".manifest.json")
         with open(manifest, "wb") as f:
             f.write(json.dumps(content, indent=2, default=json_serializer).encode())
 
-    def _read_manifest(self):
+    def _read_manifest(self, temporary=False):
         try:
-
+            if temporary:
+                manifest_path = fs.path.join(self.temporary_path, ".manifest.json")
+            else:
+                manifest_path = fs.path.join(self.final_path, ".manifest.json")
             with open(manifest_path) as manifest_file:
                 content = json.load(manifest_file, object_hook=json_deserializer)
             self._valid = True
@@ -568,9 +669,44 @@ class Artifact(object):
     def unapply(self):
         pass
 
+    def is_main(self):
+        return self._main
+
+    def is_session(self):
+        return self._session
+
     def is_valid(self):
         return self._valid
 
+    def reload(self):
+        self._unpacked = False
+        self._uploadable = True
+        self._created = datetime.now()
+        self._modified = datetime.now()
+        self._expires = self._task.expires if not self._session else expires.Immediately()
+        self._size = 0
+        self._influence = None
+        self._valid = False
+        self._temporary = False
+        self._read_manifest()
+        self._temporary = not self._valid
+
+    def reset(self):
+        self._unpacked = False
+        self._uploadable = True
+        self._created = datetime.now()
+        self._modified = datetime.now()
+        self._expires = self._task.expires if not self._session else expires.Immediately()
+        self._size = 0
+        self._influence = None
+        self._valid = False
+        self._temporary = True
+
+    @property
+    def name(self):
+        """ str: The name of the artifact. Default: 'main'. """
+        return self._name
+
     @property
     def path(self):
         """ str: The current location of the artifact in the local cache. """
@@ -625,7 +761,7 @@ class Artifact(object):
         raise_task_error_if(
             not self.is_temporary(),
             self._node,
-            "Can't collect files into an already published task artifact")
+            "Can't collect files into an already published task artifact ({})", self._log_name)
 
         files = self.tools.expand_path(files)
         files = self.tools.glob(files)
@@ -638,7 +774,7 @@ class Artifact(object):
             self.files.append(self.tools.expand_relpath(src), dest)
             self.tools.copy(src, fs.path.join(self._temp, dest), symlinks=symlinks)
             log.verbose("Collected {0} -> {2}/{1}", src, dest, self._temp)
-            return
+            return [dest]
 
         # Expand directories into full file list if flatting a tree
         # Determine relative artifact destination paths
@@ -662,6 +798,8 @@ class Artifact(object):
             self.tools.copy(srcpath, dstpath, symlinks=symlinks)
             log.verbose("Collected {0} -> {1}", relsrcpath, reldstpath)
 
+        return reldestfiles
+
     def copy(self, files, dest, flatten=False, symlinks=False, cwd=None):
         """ Copy files from the artifact.
 
@@ -694,7 +832,7 @@ class Artifact(object):
         raise_task_error_if(
             self.is_temporary(),
             self._node,
-            "Can't copy files from an unpublished task artifact")
+            "Can't copy files from an unpublished task artifact ({})", self._log_name)
 
         files = fs.path.join(self._path, files)
         files = self.tools.expand_path(files)
@@ -751,23 +889,24 @@ class Artifact(object):
     def get_size(self):
         return self._size
 
-    def get_task(self):
-        return self._node.task
-
-    def get_name(self):
-        return self._node.qualified_name
-
     def get_cache(self):
         return self._cache
 
-    def
-        return self._node.
+    def get_task(self):
+        return self._node.task
+
+    def get_node(self):
+        return self._node
 
-    def is_temporary(self):
+    def is_temporary(self) -> bool:
         return self._temporary
 
-    def is_unpackable(self):
-
+    def is_unpackable(self) -> bool:
+        if not self._node:
+            return True
+        if self.name == "main":
+            return self._task.unpack.__func__ is not tasks.Task.unpack
+        return getattr(self._task, "unpack_" + self.name, tasks.Task.unpack) is not tasks.Task.unpack
 
     def is_unpacked(self):
         return self._unpacked
@@ -775,9 +914,55 @@ class Artifact(object):
     def is_uploadable(self):
         return self._uploadable
 
+    def is_cacheable(self):
+        if not self._node:
+            return True
+        if self.is_session():
+            return True
+        return self.task.is_cacheable()
+
     @property
     def identity(self):
-        return self.
+        return self._identity
+
+    @property
+    def task(self):
+        if not self._node:
+            Task = namedtuple('Point', ['name'])
+            return Task(name=self.name)
+        return self._node.task
+
+
+class ArtifactToolsProxy(object):
+    """
+    An artifact proxy that uses a specific tools object.
+
+    Used when artifacts are consumed by tasks. The proxy allows the
+    task to access the artifact's methods and attributes using the
+    task's own tools object. This is useful when the consumer task
+    wishes to copy files, read files, etc, using the current working
+    directory and environment of the task.
+    """
+
+    def __init__(self, artifact, tools):
+        self._artifact = artifact
+        self._tools = tools
+
+    def __getattr__(self, name):
+        if name == "tools":
+            return self._tools
+        if name == "_artifact":
+            return self._artifact
+        attr = getattr(self._artifact.__class__, name, None)
+        if attr is not None and callable(attr):
+            return attr.__get__(self, ArtifactToolsProxy)
+        return getattr(self._artifact, name)
+
+    def __setattr__(self, name, value):
+        if name == "_artifact" or name == "_tools":
+            super(ArtifactToolsProxy, self).__setattr__(name, value)
+        else:
+            setattr(self._artifact, name, value)
 
 
 class Context(object):
@@ -803,17 +988,29 @@ class Context(object):
     def __enter__(self):
         try:
             for dep in reversed(self._node.children):
-
-
-
-
-
+                for artifact in dep.artifacts:
+                    # Create clone with tools from this task
+                    artifact = ArtifactToolsProxy(artifact, self._node.tools)
+
+                    # Don't include session artifacts that don't exist,
+                    # i.e. where no build has taken place due to presence
+                    # of the persistent artifacts.
+                    if not dep.is_resource():
+                        if artifact.is_session() and not self._cache.is_available_locally(artifact):
+                            continue
+
+                        self._cache.unpack(artifact)
+
+                    if artifact.name == "main":
+                        self._artifacts_index[dep.qualified_name] = artifact
+                        self._artifacts_index[dep.short_qualified_name] = artifact
+                    self._artifacts[artifact.name + "@" + dep.qualified_name] = artifact
+                    self._artifacts_index[artifact.name + "@" + dep.qualified_name] = artifact
+                    self._artifacts_index[artifact.name + "@" + dep.short_qualified_name] = artifact
                 artifact.apply()
                 ArtifactAttributeSetRegistry.apply_all(self._node.task, artifact)
-
-        except Exception as e:
+        except (Exception, KeyboardInterrupt) as e:
             # Rollback all attributes/resources except the last failing one
-            ArtifactAttributeSetRegistry.unapply_all_deps(self._node.task, self)
             for name, artifact in reversed(list(self._artifacts.items())[:-1]):
                 with utils.ignore_exception():
                     ArtifactAttributeSetRegistry.unapply_all(self._node.task, artifact)
@@ -822,7 +1019,6 @@ class Context(object):
             return self
 
     def __exit__(self, type, value, tb):
-        ArtifactAttributeSetRegistry.unapply_all_deps(self._node.task, self)
         for name, artifact in reversed(self._artifacts.items()):
             ArtifactAttributeSetRegistry.unapply_all(self._node.task, artifact)
             artifact.unapply()
@@ -846,25 +1042,32 @@ class Context(object):
 
         """
 
-        key = self._node.
+        key = self._node.tools.expand(key)
 
-
-
+        alias, artifact, task, params = utils.parse_aliased_task_name(key)
+        raise_task_error_if(alias, self._node, "Cannot define alias when indexing dependencies: {}", alias)
+        task_name = utils.format_task_name(task, params)
+        task_artifact_name = utils.format_task_name(task, params, artifact)
 
-
-
+        if task_name not in self._artifacts_index and \
+                task_artifact_name not in self._artifacts_index and not params:
+            key = self._node.resolve_requirement_alias(task_name)
+            if key:
+                _, _, task, params = utils.parse_aliased_task_name(key)
+                task_name = utils.format_task_name(task, params)
+                task_artifact_name = utils.format_task_name(task, params, artifact)
 
         # Parameters may be overspecified, resolve task
-        if
+        if task_artifact_name not in self._artifacts_index:
             from jolt.tasks import TaskRegistry
-
-
+            task_obj = TaskRegistry.get().get_task(task_name)
+            task_name = task_obj.short_qualified_name if task_obj is not None else task
+            task_artifact_name = task_name if not artifact else f"{artifact}@{task_name}"
 
-
-
-
-
-        return self._artifacts_index[key]
+        if task_artifact_name not in self._artifacts_index:
+            raise KeyError("No such artifact dependency '{0}' ({1})".format(
+                task_artifact_name, self._node.short_qualified_name))
+        return self._artifacts_index[task_artifact_name]
 
     def items(self):
         """ List all requirements and their artifacts.
@@ -877,11 +1080,148 @@ class Context(object):
 
 
 class PidProvider(object):
-    def __init__(self):
-        self._uuid = uuid.uuid4()
-
     def __call__(self):
-
+        pid = str(uuid.uuid4())
+        log.debug("New cache lock file: {0}", pid)
+        return pid
+
+
+class StorageProvider(object):
+    """
+    Base class for remote artifact storage providers.
+
+    A storage provider is responsible for uploading and downloading
+    artifacts to and from a remote storage location. The storage
+    location can be a file system path, a cloud storage service, or
+    any other type of storage.
+
+    """
+
+    def download(self, artifact: Artifact, force: bool = False) -> bool:
+        """
+        Download an artifact from the storage location.
+
+        The artifact should be downloaded to the path returned by the artifact's
+        :func:`~jolt.Artifact.get_archive_path` method. The downloaded artifact
+        must be in the format specified by DEFAULT_ARCHIVE_TYPE.
+
+        The download should be retried if it fails due to network issues.
+        The method may raise an exception on errors.
+
+        Args:
+            artifact (Artifact): The artifact to download.
+            force (bool, optional): If True, the download should be forced,
+                even if the artifact is already present locally, or if the
+                download is disabled. The default is False.
+
+        Returns:
+            bool: True if the download was successful, False otherwise.
+
+        """
+        return False
+
+    def download_enabled(self) -> bool:
+        """ Return True if downloading is enabled. Default is True. """
+        return True
+
+    def upload(self, artifact: Artifact, force: bool = False) -> bool:
+        """
+        Upload an artifact to the storage location.
+
+        The artifact to be uploaded is located at the path returned by
+        the artifact's :func:`~jolt.Artifact.get_archive_path` method. The
+        uploaded artifact is in the format specified by DEFAULT_ARCHIVE_TYPE.
+        The provider may choose to upload the artifact using a different
+        format, but it must be able to download the artifact in the
+        DEFAULT_ARCHIVE_TYPE format.
+
+        The upload should be retried if it fails due to network issues.
+        The method may raise an exception on errors.
+
+        Args:
+            artifact (Artifact): The artifact to upload.
+            force (bool, optional): If True, the upload should be forced,
+                even if the artifact is already present remotely, or if the
+                upload is disabled. The default is False.
+
+        Returns:
+            bool: True if the upload was successful, False otherwise.
+
+        """
+        return False
+
+    def upload_enabled(self) -> bool:
+        """ Return True if uploading is enabled. Default is True. """
+        return True
+
+    def location(self, artifact) -> str:
+        """
+        Return the URL of the artifact in the storage location.
+
+        This method is sometimes used to identify if an artifact is
+        present in the storage location. The URL should point to the
+        artifact if present, or an empty string if the artifact is
+        absent.
+
+        Args:
+            artifact (Artifact): The artifact to locate.
+        """
+        return ''  # URL
+
+    def availability(self, artifacts: list) -> tuple:
+        """
+        Check the availability of a list of artifacts.
+
+        This method is used to determine which artifacts are present in the
+        storage location. The method should return a tuple of two lists:
+        the first list contains the artifacts that are present, and the
+        second list contains the artifacts that are missing.
+
+        The default implementation of this method calls the :func:`~jolt.StorageProvider.location`
+        method for each artifact in the list. Subclasses may override this
+        method to provide a more efficient implementation.
+
+        Args:
+            artifacts (list): A list of artifacts to check.
+
+        Returns:
+            tuple: A tuple of two lists: the first list contains the artifacts
+                that are present, and the second list contains the artifacts
+                that are missing.
+
+        """
+        # Ensure artifacts is a list
+        artifacts = utils.as_list(artifacts)
+
+        present = set()
+        missing = set()
+
+        for artifact in artifacts:
+            if self.location(artifact):
+                present.add(artifact)
+            else:
+                missing.add(artifact)
+
+        return list(present), list(missing)
+
+
+class StorageProviderFactory(StorageProvider):
+    """ A factory for storage providers. """
+
+    def create(self) -> StorageProvider:
+        """
+        Create a new storage provider.
+
+        This method should return a new instance of a storage provider,
+        which must be a subclass of :class:`~jolt.StorageProvider`.
+
+        """
+        pass
+
+
+def RegisterStorage(cls):
+    """ Decorator used to register a storage provider factory. """
+    ArtifactCache.storage_provider_factories.append(cls)
 
 
 @utils.Singleton
@@ -943,14 +1283,18 @@ class ArtifactCache(StorageProvider):
 
     def __init__(self, options=None, pidprovider=None):
         self._options = options or JoltOptions()
-        self._remote_identity_cache = set()
        self._storage_providers = [
             factory.create(self)
             for factory in ArtifactCache.storage_provider_factories]
 
+        # If no storage provider supports the availability method,
+        # we will not rely solely on the local presence cache.
+        self._remote_presence_cache = set()
+        self._presence_cache_only = self.has_availability()
+
         # Read configuration
         self._max_size = config.getsize(
-            "jolt", "cachesize", os.environ.get("
+            "jolt", "cachesize", os.environ.get("JOLT_CACHE_SIZE", 1 * 1024 ** 3))
 
         # Create cache directory
         self._fs_create_cachedir()
@@ -959,10 +1303,12 @@ class ArtifactCache(StorageProvider):
         self._cache_locked = False
         self._lock_file = fasteners.InterProcessLock(self._fs_get_lock_file())
         self._thread_lock = RLock()
+        self._artifact_thread_lock = utils.IdLock()
 
         # Create process lock file
         with self._cache_lock():
-            self.
+            self._pid_provider = pidprovider or PidProvider()
+            self._pid = self._pid_provider()
             self._pid_file = fasteners.InterProcessLock(self._fs_get_pid_file(self._pid))
             self._pid_file.acquire()
@@ -994,6 +1340,7 @@ class ArtifactCache(StorageProvider):
         db = sqlite3.connect(self._db_path, detect_types=sqlite3.PARSE_DECLTYPES)
         try:
             db.execute("PRAGMA journal_mode=OFF")
+            # db.set_trace_callback(log.warning)
             yield db
         finally:
             db.close()
@@ -1015,9 +1362,9 @@ class ArtifactCache(StorageProvider):
             cur.execute("CREATE TABLE IF NOT EXISTS artifact_lockrefs (identity text, pid text)")
             db.commit()
 
-    def _db_insert_artifact(self, db, identity,
+    def _db_insert_artifact(self, db, identity, task_name, size):
         cur = db.cursor()
-        cur.execute("INSERT INTO artifacts VALUES (?,?,?,?)", (identity,
+        cur.execute("INSERT INTO artifacts VALUES (?,?,?,?)", (identity, task_name, size, datetime.now()))
         db.commit()
 
     def _db_update_artifact_size(self, db, identity, size):
@@ -1180,6 +1527,7 @@ class ArtifactCache(StorageProvider):
 
     def _fs_create_cachedir(self):
         self.root = config.get_cachedir()
+        log.verbose("Jolt cache path: {}", self.root)
         try:
             fs.makedirs(self.root)
         except KeyboardInterrupt as e:
@@ -1187,73 +1535,103 @@ class ArtifactCache(StorageProvider):
|
|
|
1187
1535
|
except Exception:
|
|
1188
1536
|
raise_error("Failed to create cache directory '{0}'", self.root)
|
|
1189
1537
|
|
|
1190
|
-
def _fs_get_artifact(self, node, tools=None):
|
|
1191
|
-
return Artifact(self, node, tools)
|
|
1538
|
+
def _fs_get_artifact(self, node, name, tools=None, session=False):
|
|
1539
|
+
return Artifact(self, node, name=name, tools=tools, session=session)
|
|
1192
1540
|
|
|
1193
|
-
def _fs_commit_artifact(self, artifact, uploadable):
|
|
1541
|
+
def _fs_commit_artifact(self, artifact: Artifact, uploadable: bool, temporary: bool):
|
|
1194
1542
|
artifact._set_uploadable(uploadable)
|
|
1195
1543
|
if not artifact.is_unpackable():
|
|
1196
1544
|
artifact._set_unpacked()
|
|
1197
|
-
|
|
1198
|
-
|
|
1545
|
+
if temporary:
|
|
1546
|
+
artifact._write_manifest(temporary=True)
|
|
1199
1547
|
fs.rmtree(artifact.final_path, ignore_errors=True)
|
|
1200
|
-
fs.rename(artifact.
|
|
1548
|
+
fs.rename(artifact.temporary_path, artifact.final_path)
|
|
1549
|
+
else:
|
|
1550
|
+
artifact._write_manifest(temporary=False)
|
|
1201
1551
|
|
|
1202
1552
|
@contextlib.contextmanager
|
|
1203
1553
|
def _fs_compress_artifact(self, artifact):
|
|
1204
|
-
task = artifact.
|
|
1554
|
+
task = artifact.task
|
|
1205
1555
|
archive = artifact.get_archive_path()
|
|
1206
1556
|
|
|
1207
1557
|
raise_task_error_if(
|
|
1208
1558
|
artifact.is_temporary(), task,
|
|
1209
|
-
"Can't compress an unpublished task artifact")
|
|
1559
|
+
"Can't compress an unpublished task artifact ({})", artifact._log_name)
|
|
1210
1560
|
|
|
1211
1561
|
try:
|
|
1212
|
-
|
|
1562
|
+
artifact.tools.archive(artifact.path, archive)
|
|
1213
1563
|
except KeyboardInterrupt as e:
|
|
1214
1564
|
raise e
|
|
1215
1565
|
except Exception:
|
|
1216
|
-
raise_task_error(task, "Failed to compress task artifact")
|
|
1566
|
+
raise_task_error(task, "Failed to compress task artifact ({})", artifact._log_name)
|
|
1217
1567
|
try:
|
|
1218
1568
|
yield
|
|
1219
1569
|
finally:
|
|
1220
1570
|
fs.unlink(archive, ignore_errors=True)
|
|
1221
1571
|
|
|
1222
1572
|
def _fs_decompress_artifact(self, artifact):
|
|
1223
|
-
task = artifact.
|
|
1573
|
+
task = artifact.task
|
|
1224
1574
|
archive = artifact.get_archive_path()
|
|
1225
1575
|
try:
|
|
1226
1576
|
+            task.tools.extract(archive, artifact.temporary_path, ignore_owner=True)
+            artifact._read_manifest(temporary=True)
         except KeyboardInterrupt as e:
+            fs.rmtree(artifact.temporary_path, ignore_errors=True)
             raise e
         except Exception:
-
+            fs.rmtree(artifact.temporary_path, ignore_errors=True)
+            raise_task_error(task, "Failed to extract task artifact archive ({})", artifact._log_name)
         finally:
             fs.unlink(archive, ignore_errors=True)
-            artifact._read_manifest()
-
-    def _fs_delete_artifact(self, identity, name, onerror=None):
-        fs.rmtree(self._fs_get_artifact_path(identity, name), ignore_errors=True, onerror=onerror)
-        fs.rmtree(self._fs_get_artifact_tmppath(identity, name), ignore_errors=True, onerror=onerror)
-        fs.unlink(fs.path.join(self.root, name), ignore_errors=True)

-    def
-
+    def _fs_delete_artifact(self, identity, task_name, onerror=None):
+        fs.rmtree(self._fs_get_artifact_path(identity, task_name), ignore_errors=True, onerror=onerror)
+        fs.rmtree(self._fs_get_artifact_tmppath(identity, task_name), ignore_errors=True, onerror=onerror)
+        fs.rmtree(self._fs_get_artifact_path_legacy(identity, task_name), ignore_errors=True, onerror=onerror)
+        fs.rmtree(self._fs_get_artifact_tmppath_legacy(identity, task_name), ignore_errors=True, onerror=onerror)
+        fs.unlink(fs.path.join(self.root, task_name), ignore_errors=True)
+
+    def _fs_identity(self, identity):
+        parts = identity.split("@", 1)
+        if len(parts) <= 1:
+            parts = ["main"] + parts
+        return parts[1] + "-" + utils.canonical(parts[0])
+
+    def _fs_identity_legacy(self, identity):
+        parts = identity.split("@", 1)
+        if len(parts) <= 1:
+            parts = ["main"] + parts
+        return parts[0] + "@" + utils.canonical(parts[1])
+
+    def _fs_get_artifact_archivepath(self, identity, task_name):
+        identity = self._fs_identity(identity)
+        return fs.path.join(self.root, task_name, identity) + DEFAULT_ARCHIVE_TYPE

     def _fs_get_artifact_lockpath(self, identity):
+        identity = self._fs_identity(identity)
         return fs.path.join(self.root, "locks", identity + ".lock")

-    def _fs_get_artifact_tmppath(self, identity,
-
+    def _fs_get_artifact_tmppath(self, identity, task_name):
+        identity = self._fs_identity(identity)
+        return fs.path.join(self.root, task_name, "." + identity)

-    def _fs_get_artifact_path(self, identity,
-
+    def _fs_get_artifact_path(self, identity, task_name):
+        identity = self._fs_identity(identity)
+        return fs.path.join(self.root, task_name, identity)

-    def
-
+    def _fs_get_artifact_tmppath_legacy(self, identity, task_name):
+        identity = self._fs_identity_legacy(identity)
+        return fs.path.join(self.root, task_name, "." + identity)

-    def
-
+    def _fs_get_artifact_path_legacy(self, identity, task_name):
+        identity = self._fs_identity_legacy(identity)
+        return fs.path.join(self.root, task_name, identity)
+
+    def _fs_get_artifact_manifest_path(self, identity, task_name):
+        return fs.path.join(self._fs_get_artifact_path(identity, task_name), ".manifest.json")
+
+    def _fs_get_artifact_manifest(self, identity, task_name):
+        path = self._fs_get_artifact_manifest_path(identity, task_name)
         with open(path) as manifest_file:
             return json.load(manifest_file, object_hook=json_deserializer)

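The path helpers above define the cache's new on-disk naming, and `_fs_delete_artifact` now also removes the legacy locations so artifacts written by older releases still get evicted. A minimal sketch of the two naming schemes, assuming identities look like `name@hash` and that `utils.canonical()` merely sanitizes a token into a filesystem-safe form (both are assumptions, not taken from the diff):

```python
def canonical(name: str) -> str:
    # Hypothetical stand-in for jolt.utils.canonical().
    return "".join(c if c.isalnum() else "_" for c in name.lower())

def fs_identity(identity: str) -> str:
    # Mirrors the added _fs_identity(): "<name>@<hash>" -> "<hash>-<name>",
    # defaulting the artifact name to "main" when no "@" is present.
    parts = identity.split("@", 1)
    if len(parts) <= 1:
        parts = ["main"] + parts
    return parts[1] + "-" + canonical(parts[0])

def fs_identity_legacy(identity: str) -> str:
    # Mirrors _fs_identity_legacy(), the directory naming used by older releases.
    parts = identity.split("@", 1)
    if len(parts) <= 1:
        parts = ["main"] + parts
    return parts[0] + "@" + canonical(parts[1])

print(fs_identity("a1b2c3"))              # a1b2c3-main
print(fs_identity("docs@a1b2c3"))         # a1b2c3-docs
print(fs_identity_legacy("docs@a1b2c3"))  # docs@a1b2c3
```

Deleting both spellings lets the new cache garbage-collect directories created by the old one during an upgrade.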
@@ -1266,9 +1644,9 @@ class ArtifactCache(StorageProvider):
     def _fs_get_pid_file(self, pid):
         return fs.path.join(self.root, "pids", pid)

-    def _fs_is_artifact_expired(self, identity,
+    def _fs_is_artifact_expired(self, identity, task_name, last_used):
         try:
-            manifest = self._fs_get_artifact_manifest(identity,
+            manifest = self._fs_get_artifact_manifest(identity, task_name)
             manifest["used"] = last_used
             strategy = ArtifactEvictionStrategyRegister.get().find(
                 manifest.get("expires", "immediately"))
@@ -1318,19 +1696,39 @@ class ArtifactCache(StorageProvider):
         """ Discard list of artifacts. Cache lock must be held. """
         self._assert_cache_locked()
         evicted = 0
-        for identity,
-            if not if_expired or self._fs_is_artifact_expired(identity,
-
-
-
-
+        for identity, task_name, _, used in artifacts:
+            if not if_expired or self._fs_is_artifact_expired(identity, task_name, used):
+                with utils.delayed_interrupt():
+                    self._db_delete_artifact(db, identity)
+                    self._fs_delete_artifact(identity, task_name, onerror=onerror)
+                    evicted += 1
+                    log.debug("Evicted {}: {}", identity, task_name)
         return evicted == len(artifacts)

     ############################################################################
     # Public API
     ############################################################################

-    def
+    def release(self):
+        """
+        Release references to artifacts held by the current process.
+
+        Effectively, a new pid lock file is created and the old one is deleted. This
+        allows other processes to detect termination of the current process and
+        garbage collect any references owned by the process.
+        """
+        with self._cache_lock(), self._db() as db:
+            self._db_invalidate_locks(db, try_all=True)
+            self._db_invalidate_references(db, try_all=True)
+            self._fs_invalidate_pids(db, try_all=True)
+            self._pid_file.release()
+
+            self._pid = self._pid_provider()
+            self._pid_file = fasteners.InterProcessLock(self._fs_get_pid_file(self._pid))
+            self._pid_file.acquire()
+
+    @utils.delay_interrupt
+    def is_available_locally(self, artifact):
         """
         Check presence of task artifact in cache.

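The new `release()` builds on pid lock files: each process holds an exclusive lock on a file named after its pid, and peers treat a lockable pid file as proof that its owner has exited. A rough sketch of that liveness probe with `fasteners` (the directory layout and helper names here are illustrative, not jolt's actual layout):

```python
import os
import fasteners

PID_DIR = "/tmp/jolt-cache/pids"  # illustrative location only

def hold_pid_file(pid: str) -> fasteners.InterProcessLock:
    """Called by the owning process: keep an exclusive lock on <PID_DIR>/<pid>."""
    os.makedirs(PID_DIR, exist_ok=True)
    lock = fasteners.InterProcessLock(os.path.join(PID_DIR, pid))
    lock.acquire()
    return lock

def peer_is_gone(pid: str) -> bool:
    """Called by other processes: if the lock can be taken, the owner exited."""
    probe = fasteners.InterProcessLock(os.path.join(PID_DIR, pid))
    if probe.acquire(blocking=False):
        probe.release()
        return True
    return False
```

Swapping to a fresh pid file, as `release()` does, surrenders all of a process's references without requiring the process itself to exit.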
@@ -1338,84 +1736,115 @@ class ArtifactCache(StorageProvider):
         recorded for the running process to prevent eviction by other
         processes.
         """
-        if not
+        if not artifact.is_cacheable():
             return False

-        # Cache availability in node
-        try:
-            return node.__available
-        except AttributeError:
-            pass
-
         with self._cache_lock(), self._db() as db:
-            if self._db_select_artifact(db,
-
-
-
-
-
-                node.__available = True
+            if self._db_select_artifact(db, artifact.identity) or self._db_select_reference(db, artifact.identity):
+                artifact.reload()
+                if artifact.is_temporary():
+                    self._db_delete_artifact(db, artifact.identity, and_refs=False)
+                    return False
+                self._db_insert_reference(db, artifact.identity)
                 return True
             return False

-    def is_available_remotely(self,
+    def is_available_remotely(self, artifact, cache=True):
         """
         Check presence of task artifact in external remote caches.
         """
-        if
-
-
-
+        if cache:
+            if artifact.identity in self._remote_presence_cache:
+                return True
+            if self._presence_cache_only:
+                return False
         for provider in self._storage_providers:
-
-
+            present, _ = provider.availability([artifact])
+            if present:
+                self._remote_presence_cache.add(artifact.identity)
                 return True
         return False

-    def is_available(self,
+    def is_available(self, artifact):
         """ Check presence of task artifact in any cache, local or remote """
-        return self.is_available_locally(
+        return self.is_available_locally(artifact) or self.is_available_remotely(artifact)

-    def
-
-
+    def has_availability(self):
+        # Returns true if all storage providers implement the availability method
+        return all([provider.availability.__func__ != StorageProvider.availability for provider in self._storage_providers])
+
+    def availability(self, artifacts, remote=True):
+        """ Check presence of task artifacts in any cache, local or remote """
+        present = set()
+        missing = set()
+
+        # Make sure artifacts is a list
+        artifacts = utils.as_list(artifacts)
+
+        # Check presence of all artifacts in the local cache
+        for artifact in artifacts:
+            if self.is_available_locally(artifact):
+                present.add(artifact)
+            else:
+                missing.add(artifact)
+
+        if not remote:
+            return list(present), list(missing)
+
+        # Check presence of all artifacts in the remote caches
+        missing_remotely = artifacts
+
+        for provider in self._storage_providers:
+            present_in_provider, missing_in_provider = provider.availability(missing_remotely)
+            for artifact in present_in_provider:
+                self._remote_presence_cache.add(artifact.identity)
+            present.update(present_in_provider)
+            missing_remotely = missing_in_provider
+            if not missing_in_provider:
+                break

-
-
-
+        missing.update(missing_remotely)
+        missing = missing - present
+
+        return list(present), list(missing)

     def download_enabled(self):
         return self._options.download and \
             any([provider.download_enabled() for provider in self._storage_providers])

+    def download_session_enabled(self):
+        return self._options.download_session and \
+            any([provider.download_enabled() for provider in self._storage_providers])
+
     def upload_enabled(self):
         return self._options.upload and \
             any([provider.upload_enabled() for provider in self._storage_providers])

-    def download(self,
+    def download(self, artifact, force=False):
         """
         Downloads an artifact from a remote cache to the local cache.

         The artifact is interprocess locked during the operation.
         """
-        if not force
-
-
+        if not force:
+            if not artifact.is_session() and not self.download_enabled():
+                return False
+            if artifact.is_session() and not self.download_session_enabled():
+                return False
+        if not artifact.is_cacheable():
             return False
-
-
-
-        if self.is_available_locally(node):
-            node.info("Download skipped, already in local cache")
+        with self.lock_artifact(artifact, why="download") as artifact:
+            if self.is_available_locally(artifact):
+                artifact._info("Download skipped, already in local cache")
                 return True
             for provider in self._storage_providers:
-                if provider.download(
+                if provider.download(artifact, force):
                     self._fs_decompress_artifact(artifact)
-                    self.commit(artifact)
+                    self.commit(artifact, temporary=True)
                     return True
             return len(self._storage_providers) == 0

-    def upload(self,
+    def upload(self, artifact, force=False, locked=True):
         """
         Uploads an artifact from the local cache to all configured remote caches.

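The new `availability()` narrows the query as it walks the provider list: each provider is asked only about artifacts that every earlier provider reported missing, so the remote round-trips shrink as hits accumulate. The narrowing step in isolation, with stand-in providers (a sketch, not jolt's actual provider class):

```python
class FakeProvider:
    # Stand-in for a jolt storage provider exposing availability().
    def __init__(self, known):
        self.known = set(known)

    def availability(self, artifacts):
        present = [a for a in artifacts if a in self.known]
        missing = [a for a in artifacts if a not in self.known]
        return present, missing

def remote_availability(providers, artifacts):
    # Ask each provider only about artifacts no earlier provider had.
    present, missing = set(), list(artifacts)
    for provider in providers:
        found, missing = provider.availability(missing)
        present.update(found)
        if not missing:
            break
    return sorted(present), sorted(missing)

providers = [FakeProvider({"a", "b"}), FakeProvider({"c"})]
print(remote_availability(providers, ["a", "b", "c", "d"]))
# (['a', 'b', 'c'], ['d'])
```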
@@ -1423,30 +1852,28 @@ class ArtifactCache(StorageProvider):
         """
         if not force and not self.upload_enabled():
             return False
-        if not
+        if not artifact.is_cacheable():
             return True
         raise_task_error_if(
-            not self.is_available_locally(
-            "Can't upload task artifact, no artifact present in the local cache")
-        with self.
+            not self.is_available_locally(artifact), artifact.task,
+            "Can't upload task artifact, no artifact present in the local cache ({})", artifact._log_name)
+        with self.lock_artifact(artifact, why="upload") if locked else artifact as artifact:
             raise_task_error_if(
-                not artifact.is_uploadable(),
-                "Artifact was modified locally by another process and can no longer be uploaded, try again")
+                not artifact.is_uploadable(), artifact.task,
+                "Artifact was modified locally by another process and can no longer be uploaded, try again ({})", artifact._log_name)
             if self._storage_providers:
                 with self._fs_compress_artifact(artifact):
-                    return all([provider.upload(
+                    return all([provider.upload(artifact, force) for provider in self._storage_providers])
             return len(self._storage_providers) == 0

-    def location(self,
-        if not node.task.is_cacheable():
-            return ''
+    def location(self, artifact):
         for provider in self._storage_providers:
-            url = provider.location(
+            url = provider.location(artifact)
             if url:
                 return url
         return ''

-    def unpack(self,
+    def unpack(self, artifact):
         """
         Unpacks/relocates the task artifact to the local cache.

@@ -1458,40 +1885,59 @@ class ArtifactCache(StorageProvider):

         The artifact is interprocess locked during the operation.
         """
-        if not
-            return False
-        if not node.is_unpackable():
+        if not artifact.is_unpackable():
             return True
-        with self._thread_lock, self.
-
-
+        with self._thread_lock, self.lock_artifact(artifact, why="unpack") as artifact:
+            raise_task_error_if(
+                not self.is_available_locally(artifact),
+                artifact.task,
+                "Locked artifact is missing in cache (forcibly removed?) ({})", artifact._log_name)
+
+            raise_task_error_if(
+                artifact.is_temporary(),
+                artifact.task,
+                "Can't unpack an unpublished task artifact ({})", artifact._log_name)
+
             if artifact.is_unpacked():
                 return True

             # Keep a temporary copy of the artifact if the task
             # unpack() method fails. The copy is removed in
             # get_locked_artifact() if left unused.
-            fs.copy(artifact.
+            fs.copy(artifact.final_path, artifact.temporary_path, symlinks=True)

-            task = artifact.
+            task = artifact.task
             with tools.Tools(task) as t:
                 try:
                     # Note: unpack() will run on the original
                     # artifact, not in the temporary copy.
-
+                    if task.unpack.__func__ is not tasks.Task.unpack:
+                        artifact._info("Unpack started")
                     artifact._set_unpacked()
-
-
+                    if artifact.name == "main":
+                        task.unpack(artifact, t)
+                    else:
+                        unpack = getattr(task, "unpack_" + artifact.name, None)
+                        raise_task_error_if(
+                            unpack is None, task,
+                            "Artifact unpack method not found: unpack_{}", artifact.name)
+                        unpack(artifact, t)
+
+                    self.commit(artifact, uploadable=False, temporary=False)
+
                 except NotImplementedError:
-                    self.commit(artifact)
-
+                    self.commit(artifact, temporary=False)
+
+                except (Exception, KeyboardInterrupt) as e:
                     # Restore the temporary copy
-                    fs.rmtree(artifact.
-                    fs.rename(artifact.temporary_path, artifact.
+                    fs.rmtree(artifact.final_path, ignore_errors=True)
+                    fs.rename(artifact.temporary_path, artifact.final_path)
+                    artifact._error("Unpack failed")
                     raise e
             return True

-
+    @utils.delay_interrupt
+    def commit(self, artifact, uploadable=True, temporary=True):
         """
         Commits a task artifact to the cache.

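The unpack path now dispatches per artifact name: the `main` artifact is handed to the task's `unpack()`, while any other artifact requires a matching `unpack_<name>` method on the task. A sketch of that contract with a dummy task object (illustrative only; the class and method bodies are not jolt's documented API):

```python
# Dummy task standing in for jolt.tasks.Task, showing the dispatch contract
# implied by the diff: unpack() for "main", unpack_<name> for named artifacts.

class DemoTask:
    def unpack(self, artifact, tools):
        print("relocating main artifact:", artifact)

    def unpack_docs(self, artifact, tools):
        print("relocating docs artifact:", artifact)

def dispatch_unpack(task, artifact_name, artifact, tools=None):
    # Mirrors the cache's new lookup logic.
    if artifact_name == "main":
        return task.unpack(artifact, tools)
    unpack = getattr(task, "unpack_" + artifact_name, None)
    if unpack is None:
        raise AttributeError("Artifact unpack method not found: unpack_" + artifact_name)
    return unpack(artifact, tools)

dispatch_unpack(DemoTask(), "docs", "/cache/demo/abc123-docs")
# relocating docs artifact: /cache/demo/abc123-docs
```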
@@ -1503,14 +1949,16 @@ class ArtifactCache(StorageProvider):
         take place if the resulting cache size exceeds the configured
         limit.
         """
-        if not artifact.
+        if not artifact.is_cacheable():
             return
+
         with self._cache_lock(), self._db() as db:
-            self._fs_commit_artifact(artifact, uploadable)
+            self._fs_commit_artifact(artifact, uploadable, temporary)
             with utils.ignore_exception(): # Possibly already exists in DB, e.g. unpacked
-                self._db_insert_artifact(db, artifact.
-                self._db_update_artifact_size(db, artifact.
-                self._db_insert_reference(db, artifact.
+                self._db_insert_artifact(db, artifact.identity, artifact.task.canonical_name, artifact.get_size())
+                self._db_update_artifact_size(db, artifact.identity, artifact.get_size())
+                self._db_insert_reference(db, artifact.identity)
+            artifact.reload()

         evict_size = self._db_select_sum_artifact_size(db) - self._max_size
         if evict_size < 0:
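After committing, the cache compares its total size against the configured limit and, as the next hunk shows, evicts not-in-use candidates while subtracting each candidate's size (`candidate[2]`) from the deficit. A small sketch of that budget arithmetic, assuming candidate rows are `(identity, task_name, size, last_used)` tuples sorted least-recently-used first (the sort order is an assumption):

```python
def evict_until_under_budget(candidates, total_size, max_size):
    # Evict candidates until cache usage fits within max_size.
    evict_size = total_size - max_size
    evicted = []
    for row in candidates:
        if evict_size <= 0:
            break
        evicted.append(row[0])
        evict_size -= row[2]  # third column is the artifact size
    return evicted

rows = [("id1", "t1", 400, 1), ("id2", "t2", 300, 2), ("id3", "t3", 200, 3)]
print(evict_until_under_budget(rows, total_size=1000, max_size=500))
# ['id1', 'id2']  -> frees 700 bytes, bringing usage down to 300
```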
@@ -1522,21 +1970,19 @@ class ArtifactCache(StorageProvider):
             if self._discard(db, [candidate], True):
                 evict_size -= candidate[2]

-
+    @utils.delay_interrupt
+    def discard(self, artifact, if_expired=False, onerror=None):
         with self._cache_lock(), self._db() as db:
             self._db_invalidate_locks(db)
             self._db_invalidate_references(db)
             self._fs_invalidate_pids(db)
-
+            return self._discard(
                 db,
-                self._db_select_artifact_not_in_use(db,
+                self._db_select_artifact_not_in_use(db, artifact.identity),
                 if_expired,
                 onerror=onerror)
-        if discarded and hasattr(node, "_ArtifactCache__available"):
-            del node.__available
-        return discarded

-    def _discard_wait(self,
+    def _discard_wait(self, artifact):
         """
         Discards an artifact without expiration consideration.

@@ -1550,13 +1996,14 @@ class ArtifactCache(StorageProvider):
             self._db_invalidate_locks(db)
             self._db_invalidate_references(db)
             self._fs_invalidate_pids(db)
-            artifacts = self._db_select_artifact(db,
-            self._db_delete_artifact(db,
-            refpids = self._db_select_artifact_reference_pids(db,
-
-
-
-
+            artifacts = self._db_select_artifact(db, artifact.identity)
+            self._db_delete_artifact(db, artifact.identity, and_refs=False)
+            refpids = self._db_select_artifact_reference_pids(db, artifact.identity)
+            refpids = list(filter(lambda pid: pid != self._pid, refpids))
+            lockpids = self._db_select_artifact_lock_pids(db, artifact.identity)
+
+        if len(refpids) > 0:
+            artifact._info("Artifact is temporarily in use, forced discard on hold")
         for pid in refpids:
             # Loop waiting for other processes to surrender the artifact
             while True:
@@ -1572,13 +2019,12 @@ class ArtifactCache(StorageProvider):
                         break
                 except RuntimeError:
                     with self._cache_lock(), self._db() as db:
-                        lockpids = self._db_select_artifact_lock_pids(db,
+                        lockpids = self._db_select_artifact_lock_pids(db, artifact.identity)

         with self._cache_lock(), self._db() as db:
             assert self._discard(db, artifacts, False), "Failed to discard artifact"
-
-
-        return self._fs_get_artifact(node)
+        artifact.reset()
+        return artifact

     def discard_all(self, if_expired=False, onerror=None):
         with self._cache_lock(), self._db() as db:
@@ -1594,11 +2040,30 @@ class ArtifactCache(StorageProvider):
     def get_context(self, node):
         return Context(self, node)

-    def get_artifact(self, node, tools=None):
-
+    def get_artifact(self, node, name, tools=None, session=False):
+        artifact = self._fs_get_artifact(node, name=name, tools=tools, session=session)
+        if not artifact.is_temporary():
+            with self._cache_lock(), self._db() as db:
+                if not self._db_select_artifact(db, artifact.identity) and not self._db_select_reference(db, artifact.identity):
+                    log.verbose("Artifact not present in db, discarding archive ({} )", artifact.task.short_qualified_name, artifact.identity)
+                    fs.rmtree(artifact.final_path, ignore_errors=True)
+                    artifact.reload()
+        return artifact
+
+    @contextlib.contextmanager
+    def lock_artifact(self, artifact: Artifact, discard: bool = False, why: str = "publish"):
+        """
+        Locks the task artifact, both with process thread locks and interprocess file locks.
+        """
+        try:
+            self._artifact_thread_lock.acquire(artifact.identity)
+            with self._lock_artifact_interprocess(artifact, discard=discard, why=why) as artifact:
+                yield artifact
+        finally:
+            self._artifact_thread_lock.release(artifact.identity)

     @contextlib.contextmanager
-    def
+    def _lock_artifact_interprocess(self, artifact: Artifact, discard: bool = False, why: str = "publish"):
         """
         Locks the task artifact.

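`lock_artifact()` layers a per-identity thread lock over the interprocess file lock, so threads within one process serialize before contending on the lock file. The layering pattern in miniature; the keyed thread-lock registry is an assumption about what `_artifact_thread_lock` provides, and the lock directory is illustrative:

```python
import contextlib
import os
import threading
from collections import defaultdict

import fasteners

class KeyedLock:
    # Assumed behaviour of _artifact_thread_lock: one threading.Lock per key.
    def __init__(self):
        self._guard = threading.Lock()
        self._locks = defaultdict(threading.Lock)

    def acquire(self, key):
        with self._guard:
            lock = self._locks[key]
        lock.acquire()

    def release(self, key):
        with self._guard:
            self._locks[key].release()

_thread_locks = KeyedLock()

@contextlib.contextmanager
def lock_identity(identity, lock_dir="/tmp/jolt-cache/locks"):
    # Threads in this process queue on the keyed lock first; only the winner
    # contends on the interprocess file lock, mirroring lock_artifact().
    _thread_locks.acquire(identity)
    try:
        os.makedirs(lock_dir, exist_ok=True)
        with fasteners.InterProcessLock(os.path.join(lock_dir, identity + ".lock")):
            yield identity
    finally:
        _thread_locks.release(identity)

with lock_identity("a1b2c3-main") as ident:
    print("holding", ident)
```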
@@ -1608,35 +2073,42 @@ class ArtifactCache(StorageProvider):
         """
         with self._cache_lock():
             with self._db() as db:
-                self._db_insert_lock(db,
-                self._db_insert_reference(db,
-            lock_path = self._fs_get_artifact_lockpath(
+                self._db_insert_lock(db, artifact.identity)
+                self._db_insert_reference(db, artifact.identity)
+            lock_path = self._fs_get_artifact_lockpath(artifact.identity)
             lock = fasteners.InterProcessLock(lock_path)
             is_locked = lock.acquire(blocking=False)
             if not is_locked:
-
+                artifact._info("Artifact is temporarily locked by another process")
                 lock.acquire()

+        artifact._debug("Artifact locked for {}", why)
+
         try:
-            artifact = self.get_artifact(node)
             if discard:
-                artifact = self._discard_wait(
+                artifact = self._discard_wait(artifact)
+            else:
+                artifact.reload()
+
             if artifact.is_temporary():
                 fs.rmtree(artifact.temporary_path, ignore_errors=True)
                 fs.makedirs(artifact.temporary_path)

-
-            stack.enter_context(artifact)
-            yield artifact
+            yield artifact
         finally:
+            artifact._debug("Artifact unlocked for {}", why)
            fs.rmtree(artifact.temporary_path, ignore_errors=True)
             with self._cache_lock():
                 with self._db() as db:
-                    self._db_delete_lock(db,
+                    self._db_delete_lock(db, artifact.identity)
                 lock.release()
                 with self._db() as db:
-                    if self._db_select_lock_count(db,
+                    if self._db_select_lock_count(db, artifact.identity) == 0:
                         fs.unlink(lock_path, ignore_errors=True)

-    def
-
+    def precheck(self, artifacts, remote=True):
+        """ Precheck artifacts for availability and cache status. """
+        if not self.has_availability():
+            return
+        present, missing = self.availability(artifacts, remote=remote)
+        log.verbose("Cache: {}/{} artifacts present", len(present), len(artifacts))