jolt 0.9.76__py3-none-any.whl → 0.9.429__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jolt/__init__.py +88 -7
- jolt/__main__.py +9 -1
- jolt/bin/fstree-darwin-x86_64 +0 -0
- jolt/bin/fstree-linux-x86_64 +0 -0
- jolt/cache.py +839 -367
- jolt/chroot.py +156 -0
- jolt/cli.py +362 -143
- jolt/common_pb2.py +63 -0
- jolt/common_pb2_grpc.py +4 -0
- jolt/config.py +99 -42
- jolt/error.py +19 -4
- jolt/expires.py +2 -2
- jolt/filesystem.py +8 -6
- jolt/graph.py +705 -117
- jolt/hooks.py +63 -1
- jolt/influence.py +129 -6
- jolt/loader.py +369 -121
- jolt/log.py +225 -63
- jolt/manifest.py +28 -38
- jolt/options.py +35 -10
- jolt/pkgs/abseil.py +42 -0
- jolt/pkgs/asio.py +25 -0
- jolt/pkgs/autoconf.py +41 -0
- jolt/pkgs/automake.py +41 -0
- jolt/pkgs/b2.py +31 -0
- jolt/pkgs/boost.py +111 -0
- jolt/pkgs/boringssl.py +32 -0
- jolt/pkgs/busybox.py +39 -0
- jolt/pkgs/bzip2.py +43 -0
- jolt/pkgs/cares.py +29 -0
- jolt/pkgs/catch2.py +36 -0
- jolt/pkgs/cbindgen.py +17 -0
- jolt/pkgs/cista.py +19 -0
- jolt/pkgs/clang.py +44 -0
- jolt/pkgs/cli11.py +23 -0
- jolt/pkgs/cmake.py +48 -0
- jolt/pkgs/cpython.py +196 -0
- jolt/pkgs/crun.py +29 -0
- jolt/pkgs/curl.py +38 -0
- jolt/pkgs/dbus.py +18 -0
- jolt/pkgs/double_conversion.py +24 -0
- jolt/pkgs/fastfloat.py +21 -0
- jolt/pkgs/ffmpeg.py +28 -0
- jolt/pkgs/flatbuffers.py +29 -0
- jolt/pkgs/fmt.py +27 -0
- jolt/pkgs/fstree.py +20 -0
- jolt/pkgs/gflags.py +18 -0
- jolt/pkgs/glib.py +18 -0
- jolt/pkgs/glog.py +25 -0
- jolt/pkgs/glslang.py +21 -0
- jolt/pkgs/golang.py +16 -11
- jolt/pkgs/googlebenchmark.py +18 -0
- jolt/pkgs/googletest.py +46 -0
- jolt/pkgs/gperf.py +15 -0
- jolt/pkgs/grpc.py +73 -0
- jolt/pkgs/hdf5.py +19 -0
- jolt/pkgs/help2man.py +14 -0
- jolt/pkgs/inja.py +28 -0
- jolt/pkgs/jsoncpp.py +31 -0
- jolt/pkgs/libarchive.py +43 -0
- jolt/pkgs/libcap.py +44 -0
- jolt/pkgs/libdrm.py +44 -0
- jolt/pkgs/libedit.py +42 -0
- jolt/pkgs/libevent.py +31 -0
- jolt/pkgs/libexpat.py +27 -0
- jolt/pkgs/libfastjson.py +21 -0
- jolt/pkgs/libffi.py +16 -0
- jolt/pkgs/libglvnd.py +30 -0
- jolt/pkgs/libogg.py +28 -0
- jolt/pkgs/libpciaccess.py +18 -0
- jolt/pkgs/libseccomp.py +21 -0
- jolt/pkgs/libtirpc.py +24 -0
- jolt/pkgs/libtool.py +42 -0
- jolt/pkgs/libunwind.py +35 -0
- jolt/pkgs/libva.py +18 -0
- jolt/pkgs/libvorbis.py +33 -0
- jolt/pkgs/libxml2.py +35 -0
- jolt/pkgs/libxslt.py +17 -0
- jolt/pkgs/libyajl.py +16 -0
- jolt/pkgs/llvm.py +81 -0
- jolt/pkgs/lua.py +54 -0
- jolt/pkgs/lz4.py +26 -0
- jolt/pkgs/m4.py +14 -0
- jolt/pkgs/make.py +17 -0
- jolt/pkgs/mesa.py +81 -0
- jolt/pkgs/meson.py +17 -0
- jolt/pkgs/mstch.py +28 -0
- jolt/pkgs/mysql.py +60 -0
- jolt/pkgs/nasm.py +49 -0
- jolt/pkgs/ncurses.py +30 -0
- jolt/pkgs/ng_log.py +25 -0
- jolt/pkgs/ninja.py +45 -0
- jolt/pkgs/nlohmann_json.py +25 -0
- jolt/pkgs/nodejs.py +19 -11
- jolt/pkgs/opencv.py +24 -0
- jolt/pkgs/openjdk.py +26 -0
- jolt/pkgs/openssl.py +103 -0
- jolt/pkgs/paho.py +76 -0
- jolt/pkgs/patchelf.py +16 -0
- jolt/pkgs/perl.py +42 -0
- jolt/pkgs/pkgconfig.py +64 -0
- jolt/pkgs/poco.py +39 -0
- jolt/pkgs/protobuf.py +77 -0
- jolt/pkgs/pugixml.py +27 -0
- jolt/pkgs/python.py +19 -0
- jolt/pkgs/qt.py +35 -0
- jolt/pkgs/rapidjson.py +26 -0
- jolt/pkgs/rapidyaml.py +28 -0
- jolt/pkgs/re2.py +30 -0
- jolt/pkgs/re2c.py +17 -0
- jolt/pkgs/readline.py +15 -0
- jolt/pkgs/rust.py +41 -0
- jolt/pkgs/sdl.py +28 -0
- jolt/pkgs/simdjson.py +27 -0
- jolt/pkgs/soci.py +46 -0
- jolt/pkgs/spdlog.py +29 -0
- jolt/pkgs/spirv_llvm.py +21 -0
- jolt/pkgs/spirv_tools.py +24 -0
- jolt/pkgs/sqlite.py +83 -0
- jolt/pkgs/ssl.py +12 -0
- jolt/pkgs/texinfo.py +15 -0
- jolt/pkgs/tomlplusplus.py +22 -0
- jolt/pkgs/wayland.py +26 -0
- jolt/pkgs/x11.py +58 -0
- jolt/pkgs/xerces_c.py +20 -0
- jolt/pkgs/xorg.py +360 -0
- jolt/pkgs/xz.py +29 -0
- jolt/pkgs/yamlcpp.py +30 -0
- jolt/pkgs/zeromq.py +47 -0
- jolt/pkgs/zlib.py +69 -0
- jolt/pkgs/zstd.py +33 -0
- jolt/plugins/alias.py +3 -0
- jolt/plugins/allure.py +5 -2
- jolt/plugins/autotools.py +66 -0
- jolt/plugins/cache.py +133 -0
- jolt/plugins/cmake.py +74 -6
- jolt/plugins/conan.py +238 -0
- jolt/plugins/cxx.py +698 -0
- jolt/plugins/cxxinfo.py +7 -0
- jolt/plugins/dashboard.py +1 -1
- jolt/plugins/docker.py +91 -23
- jolt/plugins/email.py +5 -2
- jolt/plugins/email.xslt +144 -101
- jolt/plugins/environ.py +11 -0
- jolt/plugins/fetch.py +141 -0
- jolt/plugins/gdb.py +44 -21
- jolt/plugins/gerrit.py +1 -14
- jolt/plugins/git.py +316 -101
- jolt/plugins/googletest.py +522 -1
- jolt/plugins/http.py +36 -38
- jolt/plugins/libtool.py +63 -0
- jolt/plugins/linux.py +990 -0
- jolt/plugins/logstash.py +4 -4
- jolt/plugins/meson.py +61 -0
- jolt/plugins/ninja-compdb.py +107 -31
- jolt/plugins/ninja.py +929 -134
- jolt/plugins/paths.py +11 -1
- jolt/plugins/pkgconfig.py +219 -0
- jolt/plugins/podman.py +148 -91
- jolt/plugins/python.py +137 -0
- jolt/plugins/remote_execution/__init__.py +0 -0
- jolt/plugins/remote_execution/administration_pb2.py +46 -0
- jolt/plugins/remote_execution/administration_pb2_grpc.py +170 -0
- jolt/plugins/remote_execution/log_pb2.py +32 -0
- jolt/plugins/remote_execution/log_pb2_grpc.py +68 -0
- jolt/plugins/remote_execution/scheduler_pb2.py +41 -0
- jolt/plugins/remote_execution/scheduler_pb2_grpc.py +141 -0
- jolt/plugins/remote_execution/worker_pb2.py +38 -0
- jolt/plugins/remote_execution/worker_pb2_grpc.py +112 -0
- jolt/plugins/report.py +12 -2
- jolt/plugins/rust.py +25 -0
- jolt/plugins/scheduler.py +710 -0
- jolt/plugins/selfdeploy/setup.py +9 -4
- jolt/plugins/selfdeploy.py +138 -88
- jolt/plugins/strings.py +35 -22
- jolt/plugins/symlinks.py +26 -11
- jolt/plugins/telemetry.py +5 -2
- jolt/plugins/timeline.py +13 -3
- jolt/plugins/volume.py +46 -48
- jolt/scheduler.py +591 -191
- jolt/tasks.py +1783 -245
- jolt/templates/export.sh.template +12 -6
- jolt/templates/timeline.html.template +44 -47
- jolt/timer.py +22 -0
- jolt/tools.py +749 -302
- jolt/utils.py +245 -18
- jolt/version.py +1 -1
- jolt/version_utils.py +2 -2
- jolt/xmldom.py +12 -2
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/METADATA +98 -38
- jolt-0.9.429.dist-info/RECORD +207 -0
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/WHEEL +1 -1
- jolt/plugins/amqp.py +0 -834
- jolt/plugins/debian.py +0 -338
- jolt/plugins/ftp.py +0 -181
- jolt/plugins/ninja-cache.py +0 -64
- jolt/plugins/ninjacli.py +0 -271
- jolt/plugins/repo.py +0 -253
- jolt-0.9.76.dist-info/RECORD +0 -79
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/entry_points.txt +0 -0
- {jolt-0.9.76.dist-info → jolt-0.9.429.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,710 @@
|
|
|
1
|
+
import click
|
|
2
|
+
import grpc
|
|
3
|
+
import queue
|
|
4
|
+
from threading import Lock
|
|
5
|
+
import time
|
|
6
|
+
|
|
7
|
+
from google.protobuf.timestamp_pb2 import Timestamp
|
|
8
|
+
|
|
9
|
+
from jolt import cache
|
|
10
|
+
from jolt import cli
|
|
11
|
+
from jolt import colors
|
|
12
|
+
from jolt import config
|
|
13
|
+
from jolt import hooks
|
|
14
|
+
from jolt import loader
|
|
15
|
+
from jolt import log
|
|
16
|
+
from jolt import common_pb2 as common_pb
|
|
17
|
+
from jolt import scheduler
|
|
18
|
+
from jolt import utils
|
|
19
|
+
from jolt.error import LoggedJoltError, JoltError, raise_error, raise_error_if, raise_task_error, raise_task_error_if
|
|
20
|
+
from jolt.graph import GraphBuilder
|
|
21
|
+
from jolt.scheduler import ExecutorRegistry, JoltEnvironment, NetworkExecutor, NetworkExecutorFactory, WorkerStrategy
|
|
22
|
+
from jolt.tasks import TaskRegistry
|
|
23
|
+
from jolt.options import JoltOptions
|
|
24
|
+
from jolt.plugins import selfdeploy
|
|
25
|
+
from jolt.plugins.remote_execution import log_pb2 as log_pb
|
|
26
|
+
from jolt.plugins.remote_execution import log_pb2_grpc as log_grpc
|
|
27
|
+
from jolt.plugins.remote_execution import scheduler_pb2 as scheduler_pb
|
|
28
|
+
from jolt.plugins.remote_execution import scheduler_pb2_grpc as scheduler_grpc
|
|
29
|
+
from jolt.plugins.remote_execution import worker_pb2_grpc as worker_grpc
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# Configuration section name used for all config lookups of this plugin.
NAME = "scheduler"
# Human-readable description of the plugin category.
TYPE = "Remote execution"
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def locked(func):
    """Decorator that serializes calls to a method using ``self.lock``.

    The decorated method's instance must provide a ``lock`` attribute
    supporting the context-manager protocol (e.g. ``threading.Lock``).

    Uses functools.wraps so the wrapper preserves the wrapped method's
    ``__name__``, ``__doc__`` and other metadata (the original wrapper
    discarded them, which hurts debugging and introspection).
    """
    import functools

    @functools.wraps(func)
    def _f(self, *args, **kwargs):
        with self.lock:
            return func(self, *args, **kwargs)
    return _f
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class LogHandler(object):
    """
    Logging handler installed on a worker while a task executes.

    Rather than emitting records locally, every handled record is converted
    into a TaskUpdate message and pushed onto the update stream, which the
    scheduler relays back to the client. The handler is installed only for
    the duration of the task execution.
    """

    def __init__(self, stream, task):
        self.stream = stream  # Update stream (Queue) towards the scheduler.
        self.task = task      # TaskRequest identifying the running task.
        self.level = log.EXCEPTION

    def emit(self, record):
        """ Deliberately a no-op; forwarding happens in handle(). """

    def handle(self, record):
        """
        Forward a log record to the scheduler.

        The record message is format()-expanded with its args when possible,
        falling back to the raw message on any formatting error.
        """
        try:
            message = record.msg.format(*record.args)
        except Exception:
            message = record.msg
        record.message = message

        stamp = Timestamp()
        stamp.FromNanoseconds(int(record.created * 1000000000))

        line = common_pb.LogLine(
            context=self.task.task_id[:8],
            level=log.level_to_pb(record.levelno),
            time=stamp,
            message=record.message,
        )
        self.stream.push(
            scheduler_pb.TaskUpdate(
                request=self.task,
                status=common_pb.TaskStatus.TASK_RUNNING,
                loglines=[line],
            )
        )

    def createLock(self):
        """ No handler-level lock is used; return None. """
        return None
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class TaskCancelledException(JoltError):
    """Raised when the scheduler cancels a task before it completes."""
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
class Queue(object):
    """
    A thread-safe FIFO used to stream messages to the scheduler.

    Instances implement the full iterator protocol so they can be passed
    directly as a gRPC request stream. Iteration blocks until an item is
    pushed and terminates after close() is called.

    Fix: the original class only defined __next__, so ``iter(q)`` and
    for-loops over the queue would raise TypeError; __iter__ is required
    for the object to be a proper iterator.
    """

    def __init__(self):
        self.q = queue.Queue()

    def __iter__(self):
        """ The queue is its own iterator. """
        return self

    def __next__(self):
        """ Block until the next item; raise StopIteration after close(). """
        data = self.q.get()
        if data is None:
            # None is the end-of-stream sentinel enqueued by close().
            raise StopIteration
        return data

    def push(self, item):
        """ Append an item to the stream. """
        self.q.put(item)

    def close(self):
        """ Terminate the stream; a blocked consumer gets StopIteration. """
        self.q.put(None)
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
class RemoteExecutor(NetworkExecutor):
    """
    Executor for remotely executed tasks.

    The executor schedules the task with the scheduler and waits for the
    scheduler to respond with a task id. The executor then waits for the
    scheduler to respond with task updates. Log messages are forwarded to the
    logging system where they are formatted and emitted.

    The executor is responsible for downloading persistent artifacts from the
    cache. The executor will not download persistent artifacts unless the
    task is marked as successfully completed.

    The executor is also responsible for downloading session artifacts from the
    cache. The executor will attempt download session artifacts regardless of the
    task status. No error is raised if the download fails.

    """

    def __init__(self, factory, session, task):
        # Factory that created this executor; provides CLI options.
        self.factory = factory
        # RemoteSession shared by all executors of this build.
        self.session = session
        # The graph task to execute remotely.
        self.task = task

    def schedule(self, env):
        """
        Schedule the task for execution.

        The task is marked as in progress before scheduling.
        """
        self.task.set_in_progress()
        return super().schedule(env)

    def cancel(self):
        """
        Cancel the build session.

        The build session will be cancelled if the task is cancelled.
        """
        self.session.cancel()

    def download_persistent_artifacts(self, task):
        """
        Download persistent artifacts from the cache.

        Recurses into task extensions first. A failed download raises a
        task error; downloads are skipped when the task has no artifact,
        downloads are disabled, or the task is not downloadable.
        """

        for extension in task.extensions:
            self.download_persistent_artifacts(extension)
        if not task.has_artifact():
            return
        if not task.cache.download_enabled():
            return
        if not task.is_downloadable():
            return
        raise_task_error_if(
            not task.download(persistent_only=True), task,
            "Failed to download artifact")

    def download_session_artifacts(self, task):
        """
        Download session artifacts from the cache.

        Unlike persistent downloads, failures here only emit a warning.
        Non-resource tasks additionally pull session artifacts of consumed
        (non-workspace) resources that are not already available locally.
        """

        for extension in task.extensions:
            self.download_session_artifacts(extension)
        if not task.has_artifact():
            return
        if not task.cache.download_session_enabled():
            return
        if not task.is_downloadable():
            return
        if not task.download(session_only=True):
            task.warning("Failed to download session artifact")
        if not task.is_resource():
            # Tasks also download session artifacts of consumed resources
            for resource in filter(lambda task: task.is_resource() and not task.is_workspace_resource(), task.children):
                if not resource.is_available_locally(persistent_only=False):
                    self.download_session_artifacts(resource)

    def download_log(self, task):
        """ Download log and transfer lines into local logging system. """
        request = log_pb.ReadLogRequest(
            id=task.instance,
        )
        for response in self.session.logs.ReadLog(request):
            for line in response.loglines:
                log.log(
                    log.pb_to_level(line.level),
                    line.message,
                    # Protobuf Timestamp -> float seconds since epoch.
                    created=line.time.ToMicroseconds() / 1000000,
                    context=line.context[:7],
                    prefix=True)

    def update_logstash(self, task):
        """ Point the task's logstash URL at the scheduler's log endpoint. """
        self.task.logstash = self.session.http_uri + "/logs/" + self.task.instance

    def run(self, env):
        """
        Run the task.

        gRPC errors from the build are converted into task errors.
        Exceptions are swallowed for unstable tasks. Session artifacts are
        always downloaded, regardless of outcome.
        """
        if self.is_aborted():
            return
        try:
            with hooks.task_run([self.task] + self.task.extensions), self.task.run_resources():
                try:
                    self.run_build(env)
                except (grpc.RpcError, grpc._channel._MultiThreadedRendezvous) as rpc_error:
                    raise_task_error(self.task, rpc_error.details(), type="Scheduler Error")
        except Exception as e:
            if not self.task.is_unstable:
                raise e
        finally:
            self.download_session_artifacts(self.task)

    @utils.retried.on_exception(grpc.RpcError)
    def run_build(self, env):
        """
        Initialize the build session and schedule the task.

        Retried on grpc.RpcError (see decorator): NOT_FOUND/UNAVAILABLE
        scheduler errors clear the build registration and re-raise so the
        retry decorator can re-register and try again.
        """

        try:
            self.session.make_build_request()

            self.task.queued(remote=True)
            for extension in self.task.extensions:
                extension.queued(remote=True)

            request = scheduler_pb.TaskRequest(
                build_id=self.session.build_id,
                task_id=self.task.identity,
            )
            response = self.session.exec.ScheduleTask(request)

            self.update_logstash(self.task)
            self.run_task(env, response)
            self.download_persistent_artifacts(self.task)

            self.task.finished_execution(remote=True)
            for extension in self.task.extensions:
                extension.finished_execution(remote=True)

        except TaskCancelledException:
            # Cancellation is an expected outcome;状态 was already recorded
            # by run_task. NOTE(review): intentionally not re-raised.
            pass

        except (grpc.RpcError, grpc._channel._MultiThreadedRendezvous) as rpc_error:
            if self.is_aborted():
                # Build aborted locally; mark still-running tasks as failed.
                if self.task.is_running():
                    self.task.failed_execution(remote=True, interrupt=True)
                    for extension in self.task.extensions:
                        extension.failed_execution(remote=True, interrupt=True)
                return

            # Only NOT_FOUND/UNAVAILABLE are considered transient and retried.
            if rpc_error.code() not in [grpc.StatusCode.NOT_FOUND, grpc.StatusCode.UNAVAILABLE]:
                raise_task_error(self.task, rpc_error.details(), type="Scheduler Error")

            self.session.clear_build_request(f"Scheduler Error: {rpc_error.details()}")
            raise rpc_error

        except Exception as e:
            if not isinstance(e, LoggedJoltError):
                log.exception()

            # With live log streaming muted, fetch the remote log now so the
            # failure context is visible locally.
            if self.factory.options.mute:
                try:
                    self.download_log(self.task)
                except Exception:
                    self.task.warning("Failed to download build log")

            self.task.failed_execution(remote=True)
            for extension in self.task.extensions:
                extension.failed_execution(remote=True)

            raise e

    def run_task(self, env, response):
        """ Run the task.

        Task updates are received from the scheduler and forwarded to the
        logging system. The task is marked as running when the scheduler
        responds with a task running status.

        A change in task status is used to determine when the task has
        completed. The task is marked as completed when the scheduler
        responds with a task passed, skipped, downloaded, or uploaded status.
        An exception is raised if the scheduler responds with a task error,
        failed, unstable, or cancelled status.
        """

        last_status = common_pb.TaskStatus.TASK_QUEUED

        for progress in response:
            # Relay streamed worker log lines into the local logging system.
            for line in progress.loglines:
                log.log(
                    log.pb_to_level(line.level),
                    line.message,
                    created=line.time.ToMicroseconds() / 1000000,
                    context=line.context[:7],
                    prefix=True)

            if progress.worker:
                self.task.worker = progress.worker.hostname

            # First RUNNING update transitions the local task to running.
            if progress.status in [common_pb.TaskStatus.TASK_RUNNING] \
                    and progress.status != self.task.status():
                self.task.running_execution(remote=True)
                for extension in self.task.extensions:
                    extension.running_execution(remote=True)

            # RUNNING -> QUEUED means the scheduler restarted the task
            # (e.g. the worker was lost).
            if progress.status in [common_pb.TaskStatus.TASK_QUEUED]:
                if last_status in [common_pb.TaskStatus.TASK_RUNNING]:
                    self.task.restarted_execution(remote=True)
                    for extension in self.task.extensions:
                        extension.restarted_execution(remote=True)

            # Terminal success states: stop consuming updates.
            if progress.status in [
                    common_pb.TaskStatus.TASK_PASSED,
                    common_pb.TaskStatus.TASK_DOWNLOADED,
                    common_pb.TaskStatus.TASK_UPLOADED,
                    common_pb.TaskStatus.TASK_SKIPPED,
            ]:
                break

            if progress.status in [common_pb.TaskStatus.TASK_CANCELLED]:
                if last_status in [common_pb.TaskStatus.TASK_RUNNING]:
                    self.task.failed_execution(remote=True, interrupt=True)
                    for extension in self.task.extensions:
                        extension.failed_execution(remote=True, interrupt=True)
                raise TaskCancelledException()

            if progress.status in [
                    common_pb.TaskStatus.TASK_FAILED,
                    common_pb.TaskStatus.TASK_UNSTABLE,
            ]:
                # Record remote errors in the local task report.
                for error in progress.errors:
                    with self.task.task.report() as report:
                        report.add_error(
                            error.type,
                            error.location,
                            error.message,
                            error.details,
                        )
                self.task.raise_for_status()
                raise raise_error("Remote execution failed")

            if progress.status in [
                    common_pb.TaskStatus.TASK_ERROR,
            ]:
                log.log(
                    log.VERBOSE,
                    f"Host: {progress.worker.hostname}",
                    created=time.time(),
                    context=self.task.identity[:7],
                    prefix=True)

                for error in progress.errors:
                    with self.task.task.report() as report:
                        report.add_error(
                            error.type,
                            error.location,
                            error.message,
                            error.details,
                        )
                self.task.raise_for_status(log_details=not self.factory.options.mute)
                raise raise_error("Remote execution failed")

            last_status = progress.status
|
|
382
|
+
|
|
383
|
+
|
|
384
|
+
class RemoteSession(object):
    """
    A session with the scheduler.

    The session is responsible for establishing a connection with the scheduler,
    registering the build and creating task executors.
    """

    def __init__(self, factory):
        # Associated executor factory.
        self.factory = factory

        # Address of the scheduler. Only tcp:// URIs are supported.
        self.address = config.geturi(NAME, "uri", "tcp://scheduler.:9090")
        raise_error_if(self.address.scheme not in ["tcp"], "Invalid scheme in scheduler URI config: {}", self.address.scheme)
        raise_error_if(not self.address.netloc, "Invalid network address in scheduler URI config: {}", self.address.netloc)

        # URI of scheduler HTTP endpoints (log access etc.).
        self.http_uri = config.get(NAME, "http_uri", f"http://{self.address.netloc}")

        # GRPC channel. NOTE(review): insecure (no TLS) — presumably the
        # scheduler is reachable only on a trusted network.
        self.channel = grpc.insecure_channel(
            target=self.address.netloc,
        )

        # GRPC stub for the scheduler service.
        self.exec = scheduler_grpc.SchedulerStub(self.channel)

        # GRPC stub for the logstash service.
        self.logs = log_grpc.LogStashStub(self.channel)

        # Read build priority from config.
        # Higher priority builds will be scheduled first.
        # Default is 0.
        self.priority = config.getint(NAME, "priority", 0)

        # The build associated with this session: server response stream
        # and the build id it reported.
        self.build = None
        self.build_id = None

        # Flag to indicate if the build has been aborted.
        self.aborted = False

        # Lock to ensure only one build is registered at a time
        # (used by the @locked methods below).
        self.lock = Lock()

        # The build environment: client, workspace, etc.
        # Built lazily on first make_build_request().
        self.buildenv = None

    def initialize(self, graph):
        """ Initialize the session from the task graph (tasks + pruned). """
        self.tasks = graph.tasks
        self.pruned = graph.pruned

    @locked
    @utils.retried.on_exception(grpc.RpcError)
    def make_build_request(self):
        """
        Create a build request with the scheduler.

        Idempotent: returns immediately if a build is already registered.
        Blocks until the scheduler responds with a build id. Raises if the
        build is rejected. Retried on grpc.RpcError.
        """

        # If a build is already registered, return.
        if self.build:
            return

        if not self.buildenv:
            # Create the build environment (reused across retries).
            self.buildenv = common_pb.BuildEnvironment(
                client=selfdeploy.get_client(),
                parameters=config.export_params(),
                task_default_parameters=scheduler.export_task_default_params(self.tasks),
                tasks=scheduler.export_tasks(self.tasks + self.pruned),
                workspace=loader.export_workspace(self.tasks),
                loglevel=log.get_level_pb(),
                config=config.export_config(),
            )

        # Create the build request.
        req = scheduler_pb.BuildRequest(
            environment=self.buildenv,
            priority=self.priority,
            # Stream logs live unless the client muted remote output.
            logstream=not self.factory.options.mute,
        )

        # Register the build with the scheduler.
        self.build = self.exec.ScheduleBuild(req)

        # Wait for the scheduler to respond with a build id.
        build = self.build.next()

        # Check if the build was rejected.
        if build.status == common_pb.BuildStatus.BUILD_REJECTED:
            raise_error("Build rejected by scheduler")

        # Store the build id.
        self.build_id = build.build_id

        log.info(colors.blue("Build registered with scheduler, waiting for worker"))
        return self.build

    @locked
    def clear_build_request(self, message=None):
        """ Clear the build request. Called when a build fails. """

        # Close grpc server response stream
        if self.build:
            self.build.cancel()
        if message:
            log.warning(message)
        self.build = None
        self.build_id = None

    def cancel(self):
        """
        Send a cancel request to the scheduler.

        Always leaves the session marked as aborted, even if the cancel
        RPC itself fails (a warning is logged instead).
        """

        # If the build has already been aborted, return.
        if self.aborted:
            return

        # If no build is registered, return.
        if not self.build:
            self.aborted = True
            return

        # Build stream open but no id assigned yet: just tear down locally.
        if not self.build_id:
            self.clear_build_request()
            return

        req = scheduler_pb.CancelBuildRequest(build_id=self.build_id)
        try:
            response = self.exec.CancelBuild(req)
            if response.status != common_pb.BuildStatus.BUILD_CANCELLED:
                log.warning("Failed to cancel build: {}", response.status)
        except grpc.RpcError as rpc_error:
            log.warning("Failed to cancel build: {}", rpc_error.details())
        finally:
            self.aborted = True

    def create_executor(self, task):
        """ Create an executor for the given task. """
        return RemoteExecutor(self.factory, self, task)
|
|
523
|
+
|
|
524
|
+
|
|
525
|
+
@scheduler.ExecutorFactory.Register
class RemoteExecutionFactory(NetworkExecutorFactory):
    """
    Factory for remote executors.

    Registers a build session with the scheduler and hands out one
    RemoteExecutor per task via that session.
    """

    def __init__(self, options):
        # Cap on concurrently executing remote tasks (config: scheduler.workers).
        super().__init__(max_workers=config.getint(NAME, "workers", 1000))
        self._options = options

    @property
    def options(self):
        """ The JoltOptions this factory was created with. """
        return self._options

    def create_session(self, graph):
        """ Create and initialize a build session in the scheduler. """
        remote_session = RemoteSession(self)
        remote_session.initialize(graph)
        return remote_session

    def create(self, session, task):
        """ Delegate executor creation to the session. """
        return session.create_executor(task)
|
|
551
|
+
|
|
552
|
+
|
|
553
|
+
# Emitted at import time to confirm the plugin module was loaded.
log.verbose("[Remote] Loaded")
|
|
554
|
+
|
|
555
|
+
|
|
556
|
+
# Hidden worker-side entry point: replays a serialized BuildRequest, builds
# the local task graph, then enlists with the scheduler and executes the
# tasks it is assigned, streaming status updates back. Invoked by the
# scheduler/worker infrastructure, not by end users.
# (Kept as comments rather than a docstring so click help output is unchanged.)
@cli.cli.command(hidden=True)
@click.option("-w", "--worker", required=True, help="Worker identifier.")
@click.option("-b", "--build", required=True, help="Build identifier to enlist for.")
@click.argument("request", required=True)
@click.pass_context
def executor(ctx, worker, build, request):
    address = config.geturi(NAME, "uri", "tcp://scheduler.:9090")
    raise_error_if(address.scheme not in ["tcp"], "Invalid scheme in scheduler URI config: {}", address.scheme)
    raise_error_if(not address.netloc, "Invalid network address in scheduler URI config: {}", address.netloc)

    channel = grpc.insecure_channel(address.netloc)
    log.verbose("Waiting for GRPC channel to connect")
    grpc.channel_ready_future(channel).result()
    log.verbose("GRPC channel established: {}", address.netloc)

    sched = worker_grpc.WorkerStub(channel)

    # Deserialize the BuildRequest from the file path given as argument.
    # Note: rebinds 'request' from path string to the parsed message.
    with open(request, "rb") as f:
        request = scheduler_pb.BuildRequest()
        request.ParseFromString(f.read())

    # Set log level
    loglevel = request.environment.loglevel
    log.set_level_pb(loglevel)

    # Import workspace
    loader.import_workspace(request.environment)

    # Import configuration snippet
    config.import_config(request.environment.config)

    # Import configuration parameters (-c params.key)
    config.import_params({param.key: param.value for param in request.environment.parameters})

    options = JoltOptions(
        network=True,
        local=False,
        download=config.getboolean("network", "download", True),
        upload=config.getboolean("network", "upload", True),
        keep_going=False,
        default=request.environment.task_default_parameters,
        worker=True,
        debug=False,
        salt=None,
        jobs=1)

    log.set_worker()
    log.verbose("Local build as a worker")

    # Load task classes from the imported workspace.
    tasks = loader.JoltLoader.get().load()
    for cls in tasks:
        TaskRegistry.get().add_task_class(cls)

    # Create the artifact cache, executor registry and execution strategy.
    acache = cache.ArtifactCache.get(options)
    executors = ExecutorRegistry.get(options)
    strategy = WorkerStrategy(executors, acache)
    hooks.TaskHookRegistry.get(options)
    registry = TaskRegistry.get(options)

    for task in options.default:
        registry.set_default_parameters(task)

    # Build the graph of tasks
    gb = GraphBuilder(registry, acache, options=options, progress=True, buildenv=request.environment)
    task_names = [task.name for task in request.environment.tasks.values()]
    dag = gb.build(task_names)

    # Enlist to execute build tasks from the scheduler
    enlist_msg = scheduler_pb.TaskUpdate(
        request=scheduler_pb.TaskRequest(build_id=build),
        worker=scheduler_pb.WorkerAllocation(id=worker, hostname=utils.hostname()),
    )

    # A queue to send updates to the scheduler
    updates = Queue()
    updates.push(enlist_msg)

    try:
        log.info("Subscribing to tasks")

        # Subscribe to tasks; GetTasks is a bidirectional stream fed by
        # the updates queue.
        for task in sched.GetTasks(updates):
            # Restore log level in case the previous task changed it.
            log.set_level_pb(loglevel)

            log.info("Queuing {}", task.task_id)
            graph_task = dag.get_task_by_identity(task.task_id)
            executor = None
            status = None

            try:
                session = {}

                # Create an executor for the task
                executor = strategy.create_executor(session, graph_task)

                # Run the task, streaming its log output to the scheduler.
                with log.handler(LogHandler(updates, task)):
                    executor.run(JoltEnvironment(cache=acache, queue=None, worker=True))

            except KeyboardInterrupt:
                # Send an update to the scheduler
                update = scheduler_pb.TaskUpdate(
                    request=task,
                    status=common_pb.TaskStatus.TASK_CANCELLED,
                )
                updates.push(update)
                continue

            except Exception:
                status = common_pb.TaskStatus.TASK_FAILED

            else:
                status = graph_task.status()

            finally:
                errors = []

                # If the task status remains queued, mark it as failed
                if status in [common_pb.TaskStatus.TASK_QUEUED]:
                    status = common_pb.TaskStatus.TASK_FAILED

                # Add errors from the task to the update sent to the scheduler
                with graph_task.task.report() as report:
                    for error in report.errors:
                        errors.append(common_pb.TaskError(
                            type=str(error.type),
                            location=str(error.location),
                            message=str(error.message),
                            details=str(error.details),
                        ))

                # Send an update to the scheduler
                update = scheduler_pb.TaskUpdate(
                    request=task,
                    status=status,
                    errors=errors,
                )
                updates.push(update)

                # Release references to cache artifacts
                acache.release()

    except grpc.RpcError as rpc_error:
        log.warning("Scheduler Error: {}", rpc_error.details())

    except KeyboardInterrupt:
        log.info("Interrupted, exiting")

    except Exception as e:
        log.set_level(log.EXCEPTION)
        log.exception(e)
        raise e

    log.info("Exiting")
|