cook-build 0.5.1-py3-none-any.whl → 0.6.0-py3-none-any.whl
- cook/__main__.py +135 -62
- cook/actions.py +21 -13
- cook/contexts.py +33 -14
- cook/controller.py +157 -56
- cook/manager.py +41 -22
- cook/task.py +13 -13
- cook/util.py +14 -9
- cook_build-0.6.0.dist-info/METADATA +91 -0
- cook_build-0.6.0.dist-info/RECORD +14 -0
- cook_build-0.5.1.dist-info/METADATA +0 -11
- cook_build-0.5.1.dist-info/RECORD +0 -14
- {cook_build-0.5.1.dist-info → cook_build-0.6.0.dist-info}/WHEEL +0 -0
- {cook_build-0.5.1.dist-info → cook_build-0.6.0.dist-info}/entry_points.txt +0 -0
- {cook_build-0.5.1.dist-info → cook_build-0.6.0.dist-info}/licenses/LICENSE +0 -0
- {cook_build-0.5.1.dist-info → cook_build-0.6.0.dist-info}/top_level.txt +0 -0
cook/controller.py
CHANGED
@@ -1,3 +1,4 @@
+from dataclasses import dataclass
 from datetime import datetime
 import hashlib
 import logging
@@ -7,7 +8,15 @@ from queue import Empty, Queue
 from sqlite3 import Connection
 import sys
 import threading
-from
+from types import TracebackType
+from typing import (
+    cast,
+    Iterable,
+    Literal,
+    Sequence,
+    TYPE_CHECKING,
+    overload,
+)
 from . import util

 if TYPE_CHECKING:
@@ -22,7 +31,8 @@ QUERIES = {
             "name" TEXT PRIMARY KEY,
             "digest" TEXT NOT NULL,
             "last_completed" TIMESTAMP,
-            "last_failed" TIMESTAMP
+            "last_failed" TIMESTAMP,
+            "last_started" TIMESTAMP
         );

         -- Information about files so we can cache digests.
@@ -47,6 +57,11 @@ QUERIES = {
         VALUES (:name, '__failed__', :last_failed)
         ON CONFLICT ("name") DO UPDATE SET "digest" = '__failed__', last_failed = :last_failed
     """,
+    "upsert_task_started": """
+        INSERT INTO "tasks" ("name", "digest", "last_started")
+        VALUES (:name, '__pending__', :last_started)
+        ON CONFLICT ("name") DO UPDATE SET "digest" = '__pending__', last_started = :last_started
+    """,
     "upsert_file": """
         INSERT INTO "files" ("name", "digest", "last_digested")
         VALUES (:name, :digest, :last_digested)
@@ -56,24 +71,39 @@ QUERIES = {
         SELECT "digest", "last_digested"
         FROM "files"
         WHERE "name" = :name AND last_digested > :last_modified
-    """
+    """,
 }


+@dataclass
+class Event:
+    kind: Literal["start", "complete", "fail"]
+    task: "Task"
+    timestamp: datetime
+    exc_info: (
+        tuple[type[BaseException], BaseException, TracebackType]
+        | tuple[None, None, None]
+    )
+    digest: str | None
+
+
 class Controller:
     """
     Controller to manage dependencies and execute tasks.
     """
+
     def __init__(self, dependencies: nx.DiGraph, connection: Connection) -> None:
         self.dependencies = dependencies
         self.connection = connection
-        self._digest_cache:
+        self._digest_cache: dict[Path, tuple[float, bytes]] = {}

-    def resolve_stale_tasks(self, tasks:
+    def resolve_stale_tasks(self, tasks: list["Task"] | None = None) -> set["Task"]:
         self.is_stale(tasks or list(self.dependencies))
-        return {
+        return {
+            node for node, data in self.dependencies.nodes(True) if data.get("is_stale")
+        }

-    def _evaluate_task_hexdigest(self, task: "Task") -> str:
+    def _evaluate_task_hexdigest(self, task: "Task") -> str | None:
         """
         Evaluate the digest of a task by combining the digest of all its dependencies.
         """
@@ -94,7 +124,7 @@ class Controller:
             hasher.update(bytearray.fromhex(hexdigest))
         return hasher.hexdigest()

-    def _evaluate_path_hexdigest(self, path:
+    def _evaluate_path_hexdigest(self, path: Path | str) -> str:
         """
         Get the digest of a file.
         """
@@ -102,10 +132,7 @@ class Controller:
         path = Path(path)
         stat = path.stat()
         name = str(path.resolve())
-        params = {
-            "name": name,
-            "last_modified": datetime.fromtimestamp(stat.st_mtime)
-        }
+        params = {"name": name, "last_modified": datetime.fromtimestamp(stat.st_mtime)}
         digest = self.connection.execute(QUERIES["select_file"], params).fetchone()
         if digest:
             return digest[0]
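The hunks above add a `last_started` column, a matching `upsert_task_started` query, and an `Event` dataclass that carries results from worker threads back to the controller. The sketch below illustrates how such an `ON CONFLICT` upsert behaves in SQLite; the table layout and query text come from the diff, while the surrounding script (in-memory connection, the example task name) is hypothetical.

    import sqlite3
    from datetime import datetime

    connection = sqlite3.connect(":memory:")
    connection.execute(
        'CREATE TABLE "tasks" ("name" TEXT PRIMARY KEY, "digest" TEXT NOT NULL, '
        '"last_completed" TIMESTAMP, "last_failed" TIMESTAMP, "last_started" TIMESTAMP)'
    )

    UPSERT_TASK_STARTED = """
        INSERT INTO "tasks" ("name", "digest", "last_started")
        VALUES (:name, '__pending__', :last_started)
        ON CONFLICT ("name") DO UPDATE SET "digest" = '__pending__', last_started = :last_started
    """

    # Running the statement twice for the same task name inserts once and then
    # updates in place instead of raising an integrity error.
    for _ in range(2):
        connection.execute(
            UPSERT_TASK_STARTED,
            {"name": "build", "last_started": datetime.now().isoformat()},
        )
    connection.commit()
    print(connection.execute('SELECT "name", "digest" FROM "tasks"').fetchall())
    # [('build', '__pending__')]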
@@ -121,7 +148,13 @@ class Controller:
         self.connection.commit()
         return digest

-
+    @overload
+    def is_stale(self, task: Sequence["Task"]) -> list[bool]: ...
+
+    @overload
+    def is_stale(self, task: "Task") -> bool: ...
+
+    def is_stale(self, task: "Task | Sequence[Task]") -> bool | list[bool]:
         """
         Determine if one or more tasks are stale.

@@ -156,16 +189,19 @@ class Controller:
         """
         # If there are no targets or the targets are missing, the task is stale.
         if not task.targets:
-            LOGGER.debug(
+            LOGGER.debug("%s is stale because it has no targets", task)
             return True
         for target in task.targets:
             if not target.is_file():
-                LOGGER.debug(
+                LOGGER.debug(
+                    "%s is stale because its target `%s` is missing", task, target
+                )
                 return True

         # If there is no digest in the database, the task is stale.
-        cached_digest = self.connection.execute(
-
+        cached_digest = self.connection.execute(
+            "SELECT digest FROM tasks WHERE name = :name", {"name": task.name}
+        ).fetchone()
         if cached_digest is None:
             LOGGER.debug("%s is stale because it does not have a hash entry", task)
             return True
@@ -176,16 +212,23 @@ class Controller:
             LOGGER.debug("%s is stale because one of its dependencies is missing", task)

         # If the digest has changed, the task is stale.
-        cached_digest, = cached_digest
+        (cached_digest,) = cached_digest
         if current_digest != cached_digest:
-            LOGGER.debug(
-
+            LOGGER.debug(
+                "%s is stale because one of its dependencies has changed (cached digest: "
+                "%s, current digest: %s)",
+                task,
+                cached_digest,
+                current_digest,
+            )
             return True

         LOGGER.debug("%s is up to date", task)
         return False

-    def execute(
+    def execute(
+        self, tasks: "Task | list[Task]", num_concurrent: int = 1, interval: float = 1
+    ) -> None:
         """
         Execute one or more tasks.

@@ -199,23 +242,30 @@ class Controller:
             return

         # Start the worker threads.
-        threads:
+        threads: list[threading.Thread] = []
         input_queue = Queue()
-        output_queue = Queue()
+        output_queue = Queue[Event]()
         stop = util.StopEvent(interval)
         for i in range(num_concurrent):
-            thread = threading.Thread(
-
+            thread = threading.Thread(
+                target=self._target,
+                name=f"cook-thread-{i}",
+                args=(stop, input_queue, output_queue),
+                daemon=True,
+            )
             thread.start()
             threads.append(thread)

         # Get the subgraph of stale nodes.
-        stale_nodes = [
-
-
+        stale_nodes = [
+            node
+            for node, data in self.dependencies.nodes.data()
+            if data.get("is_stale")
+        ]
+        dependencies = cast(nx.DiGraph, self.dependencies.subgraph(stale_nodes).copy())

         # Initialize the input queue with leaf nodes.
-        for node, out_degree in dependencies.out_degree():
+        for node, out_degree in cast(Iterable, dependencies.out_degree()):
             if out_degree == 0:
                 input_queue.put((node, self._evaluate_task_hexdigest(node)))
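The `@overload` declarations added to `is_stale` above describe a function whose return type depends on its argument: a single task yields `bool`, a sequence yields `list[bool]`. Below is a minimal, self-contained sketch of the same pattern; the `Job` class and the staleness rule are made up for illustration and are not part of the package.

    from dataclasses import dataclass
    from typing import Sequence, overload


    @dataclass(frozen=True)
    class Job:
        name: str
        done: bool = False


    @overload
    def is_stale(job: Sequence[Job]) -> list[bool]: ...
    @overload
    def is_stale(job: Job) -> bool: ...


    def is_stale(job: Job | Sequence[Job]) -> bool | list[bool]:
        # A single runtime implementation serves both overloaded signatures;
        # the @overload stubs exist purely for the type checker.
        if isinstance(job, Job):
            return not job.done
        return [is_stale(j) for j in job]


    assert is_stale(Job("build")) is True
    assert is_stale([Job("a", done=True), Job("b")]) == [False, True]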
@@ -223,48 +273,61 @@ class Controller:
            while dependencies.number_of_nodes():
                # Try to get the next item in the queue, continuing if there's nothing available.
                try:
-
-                    output_queue.get(timeout=interval)
+                    event = output_queue.get(timeout=interval)
                except Empty:  # pragma: no cover
                    continue

-                assert
+                assert event is not None, "output queue returned `None`; this is a bug"

                # Unpack the results.
-
-                if exc_info:
+                if event.kind == "fail":
                    # Update the status in the database.
                    params = {
-                        "name": task.name,
-                        "last_failed":
+                        "name": event.task.name,
+                        "last_failed": event.timestamp,
                    }
                    self.connection.execute(QUERIES["upsert_task_failed"], params)
                    self.connection.commit()
-
-
+                    ex = event.exc_info[1]
+                    raise util.FailedTaskError(ex, task=event.task) from ex
+                elif event.kind == "complete":
                    # Update the status in the database.
                    params = {
-                        "name": task.name,
-                        "digest": digest,
-                        "last_completed":
+                        "name": event.task.name,
+                        "digest": event.digest,
+                        "last_completed": event.timestamp,
                    }
                    self.connection.execute(QUERIES["upsert_task_completed"], params)
                    self.connection.commit()
+                elif event.kind == "start":
+                    params = {
+                        "name": event.task.name,
+                        "last_started": event.timestamp,
+                    }
+                    self.connection.execute(QUERIES["upsert_task_started"], params)
+                    self.connection.commit()
+                    continue
+                else:
+                    raise ValueError(event)  # pragma: no cover

                # Check if the stop event is set and abort if so.
                if stop.is_set():
                    break

                # Add tasks that are now leaf nodes to the tree.
-                predecessors = list(dependencies.predecessors(task))
-                dependencies.remove_node(task)
-                self.dependencies.add_node(task, is_stale=False)
-                for node, out_degree in
+                predecessors = list(dependencies.predecessors(event.task))
+                dependencies.remove_node(event.task)
+                self.dependencies.add_node(event.task, is_stale=False)
+                for node, out_degree in cast(
+                    Iterable, dependencies.out_degree(predecessors)
+                ):
                    if out_degree == 0:
                        input_queue.put((node, self._evaluate_task_hexdigest(node)))
        finally:
            # Set the stop event and add "None" to the queue so the workers stop waiting.
-            LOGGER.debug(
+            LOGGER.debug(
+                "set stop event for threads: %s", [thread.name for thread in threads]
+            )
            stop.set()
            for thread in threads:
                input_queue.put((None, None))
@@ -275,13 +338,15 @@ class Controller:
            if thread.is_alive():  # pragma: no cover
                raise RuntimeError(f"thread {thread} failed to join")

-    def _target(
+    def _target(
+        self, stop: util.StopEvent, input_queue: Queue, output_queue: Queue
+    ) -> None:
        LOGGER.debug(f"started thread `{threading.current_thread().name}`")
        while not stop.is_set():
            try:
                task: "Task"
                digest: str
-                task, digest = input_queue.get(stop.interval)
+                task, digest = input_queue.get(timeout=stop.interval)
            except Empty:  # pragma: no cover
                # It's unlikely there's nothing on the queue, but let's handle it anyway.
                continue
@@ -295,27 +360,63 @@ class Controller:
            start = datetime.now()
            try:
                # Execute the task.
-                LOGGER.log(
-
+                LOGGER.log(
+                    logging.DEBUG if task.name.startswith("_") else logging.INFO,
+                    "executing %s ...",
+                    task,
+                )
+                output_queue.put(
+                    Event(
+                        kind="start",
+                        task=task,
+                        digest=None,
+                        timestamp=start,
+                        exc_info=(None, None, None),
+                    )
+                )
                task.execute(stop)

                # Check that all targets were created.
                for target in task.targets:
                    if not target.is_file():
-                        raise FileNotFoundError(
+                        raise FileNotFoundError(
+                            f"task {task} did not create target {target}"
+                        )
                    LOGGER.debug("%s created `%s`", task, target)

                # Add the result to the output queue and report success.
-                output_queue.put(
+                output_queue.put(
+                    Event(
+                        kind="complete",
+                        task=task,
+                        digest=digest,
+                        timestamp=datetime.now(),
+                        exc_info=(None, None, None),
+                    )
+                )
                delta = util.format_timedelta(datetime.now() - start)
-                LOGGER.log(
-
+                LOGGER.log(
+                    logging.DEBUG if task.name.startswith("_") else logging.INFO,
+                    "completed %s in %s",
+                    task,
+                    delta,
+                )
            except:  # noqa: E722
                exc_info = sys.exc_info()
                delta = util.format_timedelta(datetime.now() - start)
-                LOGGER.exception(
+                LOGGER.exception(
+                    "failed to execute %s after %s", task, delta, exc_info=exc_info
+                )
                stop.set()
-                output_queue.put(
+                output_queue.put(
+                    Event(
+                        kind="fail",
+                        task=task,
+                        digest=digest,
+                        timestamp=datetime.now(),
+                        exc_info=sys.exc_info(),
+                    )
+                )

        # Put anything on the queue in case the parent is waiting.
        LOGGER.debug(f"exiting thread `{threading.current_thread().name}`")
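Taken together, `Controller.execute` and `_target` now exchange typed `Event` objects over a queue instead of bare tuples, and the consumer dispatches on `event.kind` ("start", "complete", "fail") to decide which database upsert to run. The sketch below shows that producer/consumer shape in isolation; apart from the three event kinds, every name is a simplified stand-in rather than the package's actual code.

    import queue
    import threading
    from dataclasses import dataclass
    from datetime import datetime


    @dataclass
    class Event:
        kind: str  # "start", "complete" or "fail"
        name: str
        timestamp: datetime


    def worker(jobs: queue.Queue, events: queue.Queue) -> None:
        while True:
            name = jobs.get()
            if name is None:  # sentinel: no more work
                return
            events.put(Event("start", name, datetime.now()))
            try:
                # ... run the task here ...
                events.put(Event("complete", name, datetime.now()))
            except Exception:
                events.put(Event("fail", name, datetime.now()))


    jobs: queue.Queue = queue.Queue()
    events: queue.Queue = queue.Queue()
    thread = threading.Thread(target=worker, args=(jobs, events), daemon=True)
    thread.start()

    jobs.put("build")
    jobs.put(None)

    # The consumer loop dispatches on the event kind, mirroring Controller.execute.
    for _ in range(2):
        event = events.get(timeout=1)
        print(event.kind, event.name)
    thread.join()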
cook/manager.py
CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations
 import logging
 import networkx as nx
 from pathlib import Path
-from typing import
+from typing import TYPE_CHECKING
 from . import task as task_
 from . import util

@@ -20,11 +20,12 @@ class Manager:
     """
     Task manager that captures the relationship between tasks, targets, and dependencies.
     """
-    _INSTANCE: Optional[Manager] = None

-
-
-
+    _INSTANCE: Manager | None = None
+
+    def __init__(self, contexts: list["Context"] | None = None) -> None:
+        self.contexts: list["Context"] = contexts or []
+        self.tasks: dict[str, "Task"] = {}

     def __enter__(self) -> Manager:
         if Manager._INSTANCE:
@@ -62,8 +63,9 @@ class Manager:
             return task
         except:  # noqa: 722
             filename, lineno = util.get_location()
-            LOGGER.exception(
-
+            LOGGER.exception(
+                "failed to create task with name '%s' at %s:%d", name, filename, lineno
+            )
             raise

     def resolve_dependencies(self) -> nx.DiGraph:
@@ -74,22 +76,28 @@ class Manager:
             Directed graph of dependencies. Edges point *from* a task *to* others it depends on.
         """
         # Run over all the targets and dependencies to explore connections between tasks.
-        task_by_target:
-        tasks_by_file_dependency:
-        dependencies:
+        task_by_target: dict[Path, "Task"] = {}
+        tasks_by_file_dependency: dict[Path, set["Task"]] = {}
+        dependencies: dict["Task", set["Task"]] = {}
         for task in self.tasks.values():
             if task.task_dependencies:
                 dependencies[task] = set(task.task_dependencies)
             for path in task.targets:
                 if path.is_symlink():
-                    LOGGER.warning(
-
+                    LOGGER.warning(
+                        "target %s of %s is a symlink which may lead to unexpected "
+                        "behavior",
+                        path,
+                        task,
+                    )
                 path = path.resolve()
-                if
-                    raise ValueError(
+                if other := task_by_target.get(path):
+                    raise ValueError(
+                        f"tasks {task} and {other} both have target {path}"
+                    )
                 task_by_target[path] = task
             for path in task.dependencies:
-                path = path.resolve()
+                path = Path(path).resolve()
                 tasks_by_file_dependency.setdefault(path, set()).add(task)

         # Build a directed graph of dependencies based on files produced and consumed by tasks.
@@ -107,7 +115,9 @@ class Manager:

         graph = nx.DiGraph()
         graph.add_nodes_from(self.tasks.values())
-        graph.add_edges_from(
+        graph.add_edges_from(
+            (task, dep) for task, deps in dependencies.items() for dep in deps
+        )

         try:
             cycle = nx.find_cycle(graph)
@@ -118,16 +128,21 @@
         return graph


-def create_task(
-
-
-
+def create_task(
+    name: str,
+    *,
+    action: "Action | str | None" = None,
+    targets: list["Path"] | None = None,
+    dependencies: list["Path"] | None = None,
+    task_dependencies: list["Task"] | None = None,
+    location: tuple[str, int] | None = None,
+) -> "Task":
     """
     Create a new task.

     Args:
         name: Name of the new task.
-        action: Action to execute.
+        action: Action to execute or a string for shell commands.
         targets: Paths for files to be generated.
         dependencies: Paths to files on which this task depends.
         task_dependencies: Tasks which the new task explicitly depends on.
@@ -138,6 +153,10 @@ def create_task(name: str, *, action: Optional["Action"] = None,
         New task.
     """
     return Manager.get_instance().create_task(
-        name,
+        name,
+        action=action,
+        targets=targets,
+        dependencies=dependencies,
+        location=location,
         task_dependencies=task_dependencies,
     )
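`Manager.resolve_dependencies` builds a `networkx.DiGraph` whose edges point from a task to the tasks it depends on, rejects duplicate targets with a walrus-operator lookup, and refuses graphs that contain cycles via `nx.find_cycle`. A small standalone sketch of the cycle check follows; the task names are invented for the example.

    import networkx as nx

    # Edges point *from* a task *to* the tasks it depends on.
    graph = nx.DiGraph()
    graph.add_edges_from([("report", "analysis"), ("analysis", "data")])

    try:
        nx.find_cycle(graph)
        raise ValueError("dependency graph contains a cycle")
    except nx.NetworkXNoCycle:
        pass  # acyclic: a valid build graph

    # Introducing a circular dependency makes find_cycle return the offending edges.
    graph.add_edge("data", "report")
    print(nx.find_cycle(graph))
    # e.g. [('report', 'analysis'), ('analysis', 'data'), ('data', 'report')]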
cook/task.py
CHANGED
@@ -1,8 +1,7 @@
 from __future__ import annotations
 import colorama
 from pathlib import Path
-import
-from typing import List, Optional, Tuple, TYPE_CHECKING
+from typing import TYPE_CHECKING
 from . import util


@@ -15,16 +14,17 @@ class Task:
     """
     Task to be executed.
     """
+
     def __init__(
-
-
-
-
-
-
-
-
+        self,
+        name: str,
+        *,
+        dependencies: list["PathOrStr"] | None = None,
+        targets: list["PathOrStr"] | None = None,
+        action: Action | None = None,
+        task_dependencies: list[Task] | None = None,
+        location: tuple[str, int] | None = None,
+    ) -> None:
         self.name = name
         self.dependencies = dependencies or []
         self.targets = [Path(path) for path in (targets or [])]
@@ -32,14 +32,14 @@ class Task:
         self.task_dependencies = task_dependencies or []
         self.location = location or util.get_location()

-    def execute(self, stop:
+    def execute(self, stop: util.StopEvent | None = None) -> None:
         if self.action:
             self.action.execute(self, stop)

     def __hash__(self) -> int:
         return hash(self.name)

-    def format(self, color: str = None) -> str:
+    def format(self, color: str | None = None) -> str:
         name = self.name
         if color:
             name = f"{color}{name}{colorama.Fore.RESET}"
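The `Task` changes are mostly a typing modernisation: `List`, `Optional`, and `Tuple` from `typing` give way to built-in generics and PEP 604 unions (`X | None`), and every constructor argument after `name` becomes keyword-only. The toy class below illustrates both idioms; it is not the package's `Task`.

    from pathlib import Path


    class Job:
        # The bare `*` makes every following parameter keyword-only.
        def __init__(
            self,
            name: str,
            *,
            targets: list[Path | str] | None = None,  # PEP 604 union instead of Optional
            location: tuple[str, int] | None = None,
        ) -> None:
            self.name = name
            self.targets = [Path(path) for path in (targets or [])]
            self.location = location


    job = Job("docs", targets=["build/index.html"])
    print(job.targets)  # [PosixPath('build/index.html')] on POSIX systems
    # Job("docs", ["build/index.html"]) would raise TypeError: targets must be passed by keyword.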
cook/util.py
CHANGED
@@ -7,26 +7,26 @@ import os
 from pathlib import Path
 import threading
 from time import time
-from typing import
+from typing import TYPE_CHECKING, Generator


 if TYPE_CHECKING:
     from .task import Task


-PathOrStr =
+PathOrStr = Path | str


-def evaluate_digest(path: PathOrStr, size=2
-
+def evaluate_digest(path: PathOrStr, size=2**16, hasher: str = "sha1") -> bytes:
+    hasher_instance = hashlib.new(hasher)
     path = Path(path)
     with path.open("rb") as fp:
         while chunk := fp.read(size):
-
-            return
+            hasher_instance.update(chunk)
+    return hasher_instance.digest()


-def evaluate_hexdigest(path: PathOrStr, size=2
+def evaluate_hexdigest(path: PathOrStr, size=2**16, hasher: str = "sha1") -> str:
     return evaluate_digest(path, size, hasher).hex()


@@ -43,6 +43,8 @@ class Timer:

     @property
     def duration(self):
+        assert self.start is not None, "Timer has not started yet."
+        assert self.end is not None, "Timer has not finished yet."
         return self.end - self.start


@@ -57,7 +59,7 @@ class FailedTaskError(Exception):


 @contextlib.contextmanager
-def working_directory(path: PathOrStr) -> Path:
+def working_directory(path: PathOrStr) -> Generator[Path]:
     path = Path(path)
     original = Path.cwd()
     try:
@@ -67,7 +69,7 @@ def working_directory(path: PathOrStr) -> Path:
         os.chdir(original)


-def get_location() ->
+def get_location() -> tuple[Path, int]:
     """
     Get the first location in the call stack which is not part of the Cook package.

@@ -75,8 +77,10 @@ def get_location() -> Tuple[str, int]:
     Location as a tuple :code:`(filename, lineno)`.
     """
     frame = inspect.currentframe()
+    assert frame is not None, "Could not fetch current frame."
     while frame.f_globals.get("__name__", "<unknown>").startswith("cook"):
         frame = frame.f_back
+        assert frame is not None, "Could not fetch parent frame."
     return Path(frame.f_code.co_filename).resolve(), frame.f_lineno


@@ -84,6 +88,7 @@ class StopEvent(threading.Event):
     """
     Event used for stopping execution with a polling interval.
     """
+
     def __init__(self, interval: float = 1) -> None:
         super().__init__()
         self.interval = interval