cook-build 0.6.4-py3-none-any.whl → 0.7.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cook/__init__.py CHANGED
@@ -1,8 +1,7 @@
  from .controller import Controller
- from .manager import create_task, Manager
+ from .manager import Manager, create_task
  from .task import Task

-
  __all__ = [
  "Controller",
  "create_task",
cook/__main__.py CHANGED
@@ -1,29 +1,30 @@
  import argparse
- import colorama
- from contextlib import closing
- from datetime import datetime
+ import asyncio
  import fnmatch
  import importlib.util
  import logging
  import os
- from pathlib import Path
  import re
  import sqlite3
  import sys
  import textwrap
+ from contextlib import closing
+ from datetime import datetime
+ from pathlib import Path
  from typing import Iterable

+ import colorama
+
  from .contexts import (
  create_target_directories,
  normalize_action,
  normalize_dependencies,
  )
- from .controller import Controller, QUERIES
+ from .controller import QUERIES, Controller
  from .manager import Manager
  from .task import Task
  from .util import FailedTaskError, format_datetime, format_timedelta

-
  LOGGER = logging.getLogger("cook")


@@ -147,7 +148,9 @@ class ExecCommand(Command):

  def execute(self, controller: Controller, args: ExecArgs) -> None: # pyright: ignore[reportIncompatibleMethodOverride]
  tasks = self.discover_tasks(controller, args)
- controller.execute(tasks, num_concurrent=args.jobs, dry_run=args.dry_run)
+ asyncio.run(
+ controller.execute(tasks, num_concurrent=args.jobs, dry_run=args.dry_run)
+ )


  class LsArgs(Args):
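Note on the change above: Controller.execute is now a coroutine, so the command-line entry point wraps it in asyncio.run. A minimal migration sketch for scripts that call the controller directly (the controller and tasks variables stand in for whatever such a script already builds):

    import asyncio

    # cook 0.6.x: controller.execute(tasks, num_concurrent=4)
    # cook 0.7.x: the call returns a coroutine and must be awaited or run.
    asyncio.run(controller.execute(tasks, num_concurrent=4, dry_run=False))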
cook/actions.py CHANGED
@@ -8,51 +8,58 @@ multiple actions using :class:`.CompositeAction`, and executing modules as scrip
  :class:`.ModuleAction`.

  Custom actions can be implemented by inheriting from :class:`.Action` and implementing the
- :meth:`~.Action.execute` method which receives a :class:`~.task.Task`. The method should execute the
- action; its return value is ignored. For example, the following action waits for a specified time.
+ :meth:`~.Action.execute` method which receives a :class:`~.task.Task`. The method should be async
+ and its return value is ignored. For example, the following action waits for a specified time.

  .. doctest::

  >>> from cook.actions import Action
  >>> from cook.task import Task
- >>> from time import sleep, time
+ >>> import asyncio
+ >>> from time import time

  >>> class SleepAction(Action):
  ... def __init__(self, delay: float) -> None:
  ... self.delay = delay
  ...
- ... def execute(self, task: Task) -> None:
+ ... async def execute(self, task: Task) -> None:
  ... start = time()
- ... sleep(self.delay)
+ ... await asyncio.sleep(self.delay)
  ... print(f"time: {time() - start:.3f}")

  >>> action = SleepAction(0.1)
- >>> action.execute(None)
+ >>> asyncio.run(action.execute(None))
  time: 0.1...
+
+ For backwards compatibility, synchronous execute methods are also supported but will run in an
+ executor with a deprecation warning.
  """

+ import asyncio
  import hashlib
+ import logging
  import os
  import shlex
  import subprocess
  import sys
  from types import ModuleType
- from typing import Callable, TYPE_CHECKING
-
+ from typing import TYPE_CHECKING, Callable

  if TYPE_CHECKING:
  from .task import Task
- from .util import StopEvent
+
+
+ LOGGER = logging.getLogger(__name__)


  class Action:
  """
- Action to perform when a task is executed in its own thread.
+ Action to perform when a task is executed.
  """

- def execute(self, task: "Task", stop: "StopEvent | None" = None) -> None:
+ async def execute(self, task: "Task") -> None:
  """
- Execute the action.
+ Execute the action asynchronously.
  """
  raise NotImplementedError

@@ -80,17 +87,23 @@ class FunctionAction(Action):
  self.args = args
  self.kwargs = kwargs

- def execute(self, task: "Task", stop: "StopEvent | None" = None) -> None:
- self.func(task, *self.args, **self.kwargs)
+ async def execute(self, task: "Task") -> None:
+ # Check if the function is already async
+ if asyncio.iscoroutinefunction(self.func):
+ await self.func(task, *self.args, **self.kwargs)
+ else:
+ # Run sync function in executor
+ loop = asyncio.get_running_loop()
+ await loop.run_in_executor(None, self.func, task, *self.args, **self.kwargs)


  class SubprocessAction(Action):
  """
- Run a subprocess.
+ Run a subprocess asynchronously.

  Args:
- *args: Positional arguments for :class:`subprocess.Popen`.
- **kwargs: Keyword arguments for :class:`subprocess.Popen`.
+ *args: Positional arguments for subprocess execution.
+ **kwargs: Keyword arguments for subprocess execution.

  Example:

@@ -98,47 +111,55 @@ class SubprocessAction(Action):

  >>> from cook.actions import SubprocessAction
  >>> from pathlib import Path
+ >>> import asyncio

  >>> action = SubprocessAction(["touch", "hello.txt"])
- >>> action.execute(None)
+ >>> asyncio.run(action.execute(None))
  >>> Path("hello.txt").is_file()
  True
  """

  def __init__(self, *args, **kwargs) -> None:
+ # Validate shell argument early
+ if kwargs.get("shell", False) and args and not isinstance(args[0], str):
+ raise ValueError("shell=True requires string args")
  self.args = args
  self.kwargs = kwargs

- def execute(self, task: "Task", stop: "StopEvent | None" = None) -> None:
- # Repeatedly wait for the process to complete, checking the stop event after each poll.
- interval = stop.interval if stop else None
- process = subprocess.Popen(*self.args, **self.kwargs)
- while True:
- try:
- returncode = process.wait(interval)
- if returncode:
- raise subprocess.CalledProcessError(returncode, process.args)
- return
- except subprocess.TimeoutExpired:
- if stop and stop.is_set():
- break
-
- # Clean up the process by trying to terminate it and then killing it.
- for method in [process.terminate, process.kill]:
- method()
- try:
- returncode = process.wait(max(interval, 3) if interval else None)
- if returncode:
- raise subprocess.CalledProcessError(returncode, process.args)
- # The process managed to exit gracefully after the main loop. This is unlikely.
- return # pragma: no cover
- except subprocess.TimeoutExpired: # pragma: no cover
- pass
-
- # We couldn't kill the process. Also very unlikely.
- raise subprocess.SubprocessError(
- f"failed to shut down {process}"
- ) # pragma: no cover
+ async def execute(self, task: "Task") -> None:
+ # Get the command arguments
+ (args,) = self.args
+ shell = self.kwargs.get("shell", False)
+ other_kwargs = {k: v for k, v in self.kwargs.items() if k != "shell"}
+
+ # Create the subprocess
+ if shell:
+ process = await asyncio.create_subprocess_shell(args, **other_kwargs)
+ else:
+ # Exec mode: args can be a string (single command) or list
+ if isinstance(args, str):
+ # Single command string - treat as program name with no arguments
+ process = await asyncio.create_subprocess_exec(args, **other_kwargs)
+ else:
+ # List of arguments
+ process = await asyncio.create_subprocess_exec(*args, **other_kwargs)
+
+ try:
+ # Wait for the process to complete
+ returncode = await process.wait()
+ if returncode:
+ raise subprocess.CalledProcessError(returncode, args)
+
+ except asyncio.CancelledError:
+ # Task was cancelled - terminate the subprocess
+ if process.returncode is None:
+ process.terminate()
+ try:
+ await asyncio.wait_for(process.wait(), timeout=3)
+ except asyncio.TimeoutError:
+ process.kill()
+ await process.wait()
+ raise

  @property
  def hexdigest(self) -> str:
@@ -169,9 +190,9 @@ class CompositeAction(Action):
  def __init__(self, *actions: Action) -> None:
  self.actions = actions

- def execute(self, task: "Task", stop: "StopEvent | None" = None) -> None:
+ async def execute(self, task: "Task") -> None:
  for action in self.actions:
- action.execute(task, stop)
+ await action.execute(task)

  @property
  def hexdigest(self) -> str | None:
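The rewritten SubprocessAction.execute above dispatches to asyncio.create_subprocess_shell when shell=True (which now requires a command string) and to asyncio.create_subprocess_exec otherwise, terminating and then killing the child if the surrounding task is cancelled. A small usage sketch on a POSIX system, mirroring the doctest:

    import asyncio
    from pathlib import Path
    from cook.actions import SubprocessAction

    # Exec mode: a list of arguments.
    asyncio.run(SubprocessAction(["touch", "exec.txt"]).execute(None))

    # Shell mode: a single command string; passing a list with shell=True raises ValueError.
    asyncio.run(SubprocessAction("echo hello > shell.txt", shell=True).execute(None))

    print(Path("exec.txt").is_file(), Path("shell.txt").is_file())  # True True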
cook/contexts.py CHANGED
@@ -31,15 +31,15 @@ Custom contexts can be implemented by inheriting from :class:`.Context` and impl
  """

  from __future__ import annotations
+
+ import warnings
  from pathlib import Path
  from types import ModuleType
- from typing import Callable, TYPE_CHECKING, TypeVar
- import warnings
- from . import actions
+ from typing import TYPE_CHECKING, Callable, TypeVar
+
+ from . import actions, util
  from . import manager as manager_
  from . import task as task_
- from . import util
-

  if TYPE_CHECKING:
  from .manager import Manager
cook/controller.py CHANGED
@@ -1,22 +1,19 @@
- from dataclasses import dataclass
- from datetime import datetime
+ import asyncio
  import hashlib
  import logging
- import networkx as nx
+ import warnings
+ from datetime import datetime
  from pathlib import Path
- from queue import Empty, Queue
  from sqlite3 import Connection
- import sys
- import threading
- from types import TracebackType
  from typing import (
- cast,
- Iterable,
- Literal,
- Sequence,
  TYPE_CHECKING,
+ Sequence,
+ cast,
  overload,
  )
+
+ import networkx as nx
+
  from . import util

  if TYPE_CHECKING:
@@ -75,18 +72,6 @@ QUERIES = {
  }


- @dataclass
- class Event:
- kind: Literal["start", "complete", "fail"]
- task: "Task"
- timestamp: datetime
- exc_info: (
- tuple[type[BaseException], BaseException, TracebackType]
- | tuple[None, None, None]
- )
- digest: str | None
-
-
  class Controller:
  """
  Controller to manage dependencies and execute tasks.
@@ -232,42 +217,33 @@ class Controller:
  LOGGER.debug("%s is up to date", task)
  return False

- def execute(
+ async def execute(
  self,
  tasks: "Task | list[Task]",
  num_concurrent: int = 1,
- interval: float = 1,
+ interval: float | None = None,
  dry_run: bool = False,
  ) -> None:
  """
- Execute one or more tasks.
+ Execute one or more tasks asynchronously.

  Args:
  tasks: Tasks to execute.
- num_concurrent: Number of concurrent threads to run.
- interval: Interval for checking stop events.
+ num_concurrent: Number of concurrent tasks to run.
+ interval: Deprecated, kept for backward compatibility.
  dry_run: If True, show what would execute without running tasks.
  """
+ if interval is not None: # pragma: no cover
+ warnings.warn(
+ "The 'interval' parameter is deprecated and has no effect",
+ DeprecationWarning,
+ stacklevel=2,
+ )
  if not isinstance(tasks, Sequence):
  tasks = [tasks]
  if not any(self.is_stale(tasks)):
  return

- # Start the worker threads.
- threads: list[threading.Thread] = []
- input_queue = Queue()
- output_queue = Queue[Event]()
- stop = util.StopEvent(interval)
- for i in range(num_concurrent):
- thread = threading.Thread(
- target=self._target,
- name=f"cook-thread-{i}",
- args=(stop, input_queue, output_queue, dry_run),
- daemon=True,
- )
- thread.start()
- threads.append(thread)
-
  # Get the subgraph of stale nodes.
  stale_nodes = [
  node
@@ -276,191 +252,119 @@ class Controller:
  ]
  dependencies = cast(nx.DiGraph, self.dependencies.subgraph(stale_nodes).copy())

- # Initialize the input queue with leaf nodes.
- for node, out_degree in cast(Iterable, dependencies.out_degree()):
- if out_degree == 0:
- input_queue.put((node, self._evaluate_task_hexdigest(node)))
+ # Create semaphore for concurrency control
+ semaphore = asyncio.Semaphore(num_concurrent)

- try:
- while dependencies.number_of_nodes():
- # Try to get the next item in the queue, continuing if there's nothing available.
- try:
- event = output_queue.get(timeout=interval)
- except Empty: # pragma: no cover
- continue
-
- assert event is not None, "output queue returned `None`; this is a bug"
-
- # Unpack the results.
- if event.kind == "fail":
- # Update the status in the database.
- if not dry_run:
- params = {
- "name": event.task.name,
- "last_failed": event.timestamp,
- }
- self.connection.execute(QUERIES["upsert_task_failed"], params)
- self.connection.commit()
- ex = event.exc_info[1]
- raise util.FailedTaskError(ex, task=event.task) from ex
- elif event.kind == "complete":
- # Update the status in the database.
- if not dry_run:
- params = {
- "name": event.task.name,
- "digest": event.digest,
- "last_completed": event.timestamp,
- }
- self.connection.execute(
- QUERIES["upsert_task_completed"], params
- )
- self.connection.commit()
- elif event.kind == "start":
- if not dry_run:
- params = {
- "name": event.task.name,
- "last_started": event.timestamp,
- }
- self.connection.execute(QUERIES["upsert_task_started"], params)
- self.connection.commit()
- continue
- else:
- raise ValueError(event) # pragma: no cover
-
- # Check if the stop event is set and abort if so.
- if stop.is_set():
- break
-
- # Add tasks that are now leaf nodes to the tree.
- predecessors = list(dependencies.predecessors(event.task))
- dependencies.remove_node(event.task)
- self.dependencies.add_node(event.task, is_stale=False)
- for node, out_degree in cast(
- Iterable, dependencies.out_degree(predecessors)
- ):
- if out_degree == 0:
- input_queue.put((node, self._evaluate_task_hexdigest(node)))
- finally:
- # Set the stop event and add "None" to the queue so the workers stop waiting.
- LOGGER.debug(
- "set stop event for threads: %s", [thread.name for thread in threads]
+ # Create futures for all stale tasks
+ task_futures: dict["Task", asyncio.Task] = {}
+ for task in dependencies:
+ task_futures[task] = asyncio.create_task(
+ self._execute_task(task, task_futures, dependencies, semaphore, dry_run)
  )
- stop.set()
- for thread in threads:
- input_queue.put((None, None))

- # Shut down the worker threads.
- for thread in threads:
- thread.join(3 * stop.interval)
- if thread.is_alive(): # pragma: no cover
- raise RuntimeError(f"thread {thread} failed to join")
-
- def _target(
+ # Wait for requested tasks
+ requested_futures = [task_futures[t] for t in tasks if t in task_futures]
+ try:
+ await asyncio.gather(*requested_futures)
+ except Exception:
+ # Cancel all pending tasks
+ for future in task_futures.values():
+ if not future.done():
+ future.cancel()
+ # Wait for all cancellations to complete
+ await asyncio.gather(*task_futures.values(), return_exceptions=True)
+ raise
+
+ async def _execute_task(
  self,
- stop: util.StopEvent,
- input_queue: Queue,
- output_queue: Queue,
- dry_run: bool = False,
+ task: "Task",
+ task_futures: dict["Task", asyncio.Task],
+ dependencies: nx.DiGraph,
+ semaphore: asyncio.Semaphore,
+ dry_run: bool,
  ) -> None:
- LOGGER.debug(f"started thread `{threading.current_thread().name}`")
- while not stop.is_set():
- try:
- task: "Task"
- digest: str
- task, digest = input_queue.get(timeout=stop.interval)
- except Empty: # pragma: no cover
- # It's unlikely there's nothing on the queue, but let's handle it anyway.
- continue
- # Check the stop event before executing the task; it may have been set while we were
- # waiting for the next task in the queue.
- if stop.is_set():
- break
-
- assert task is not None, "input queue returned `None`; this is a bug"
-
- start = datetime.now()
- try:
- # Execute or simulate the task.
- if dry_run:
- LOGGER.log(
- logging.DEBUG if task.name.startswith("_") else logging.INFO,
- "would execute %s",
- task,
- )
- if task.action:
- LOGGER.log(
- logging.DEBUG
- if task.name.startswith("_")
- else logging.INFO,
- " action: %s",
- task.action,
- )
- else:
- LOGGER.log(
- logging.DEBUG if task.name.startswith("_") else logging.INFO,
- "executing %s ...",
- task,
- )
+ """Execute a single task after waiting for its dependencies."""
+ # Wait for all dependencies to complete
+ dep_tasks = list(dependencies.successors(task))
+ if dep_tasks:
+ dep_futures = [task_futures[dep] for dep in dep_tasks]
+ await asyncio.gather(*dep_futures)

- output_queue.put(
- Event(
- kind="start",
- task=task,
- digest=None,
- timestamp=start,
- exc_info=(None, None, None),
- )
- )
+ start = datetime.now()
+ digest = self._evaluate_task_hexdigest(task)

- if not dry_run:
- task.execute(stop)
-
- # Check that all targets were created.
- for target in task.targets:
- if not target.is_file():
- raise FileNotFoundError(
- f"task {task} did not create target {target}"
- )
- LOGGER.debug("%s created `%s`", task, target)
-
- # Add the result to the output queue and report success.
- output_queue.put(
- Event(
- kind="complete",
- task=task,
- digest=digest,
- timestamp=datetime.now(),
- exc_info=(None, None, None),
- )
+ try:
+ # Log what we're doing
+ if dry_run:
+ LOGGER.log(
+ logging.DEBUG if task.name.startswith("_") else logging.INFO,
+ "would execute %s",
+ task,
  )
- if not dry_run:
- delta = util.format_timedelta(datetime.now() - start)
+ if task.action:
  LOGGER.log(
  logging.DEBUG if task.name.startswith("_") else logging.INFO,
- "completed %s in %s",
- task,
- delta,
+ " action: %s",
+ task.action,
  )
- except: # noqa: E722
- exc_info = sys.exc_info()
- delta = util.format_timedelta(datetime.now() - start)
- LOGGER.exception(
- "failed to execute %s after %s", task, delta, exc_info=exc_info
+ else:
+ LOGGER.log(
+ logging.DEBUG if task.name.startswith("_") else logging.INFO,
+ "executing %s ...",
+ task,
  )
- stop.set()
- output_queue.put(
- Event(
- kind="fail",
- task=task,
- digest=digest,
- timestamp=datetime.now(),
- exc_info=sys.exc_info(),
- )
+
+ # Update DB for start
+ if not dry_run:
+ params = {"name": task.name, "last_started": start}
+ self.connection.execute(QUERIES["upsert_task_started"], params)
+ self.connection.commit()
+
+ # Execute the task
+ if not dry_run:
+ async with semaphore:
+ await task.execute()
+
+ # Check that all targets were created
+ for target in task.targets:
+ if not target.is_file():
+ raise FileNotFoundError(
+ f"task {task} did not create target {target}"
+ )
+ LOGGER.debug("%s created `%s`", task, target)
+
+ # Update DB for completion
+ if not dry_run:
+ params = {
+ "name": task.name,
+ "digest": digest,
+ "last_completed": datetime.now(),
+ }
+ self.connection.execute(QUERIES["upsert_task_completed"], params)
+ self.connection.commit()
+
+ # Log completion
+ delta = util.format_timedelta(datetime.now() - start)
+ LOGGER.log(
+ logging.DEBUG if task.name.startswith("_") else logging.INFO,
+ "completed %s in %s",
+ task,
+ delta,
  )

- # Put anything on the queue in case the parent is waiting.
- LOGGER.debug(f"exiting thread `{threading.current_thread().name}`")
- output_queue.put(None)
+ # Mark task as no longer stale
+ self.dependencies.nodes[task]["is_stale"] = False
+
+ except Exception as ex:
+ # Update DB for failure
+ if not dry_run:
+ params = {"name": task.name, "last_failed": datetime.now()}
+ self.connection.execute(QUERIES["upsert_task_failed"], params)
+ self.connection.commit()
+
+ delta = util.format_timedelta(datetime.now() - start)
+ LOGGER.exception("failed to execute %s after %s", task, delta)
+ raise util.FailedTaskError(ex, task=task) from ex

  def reset(self, *tasks: "Task") -> None:
  # TODO: add tests for resetting.
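The worker threads and queues of the old execute are replaced by one asyncio.Task per stale node: each task awaits the futures of its dependencies, then runs under a semaphore that caps concurrency, and a failure cancels the remaining tasks. The following self-contained sketch illustrates that scheduling pattern on a toy graph (it is not cook's API, just the same idea):

    import asyncio

    # Toy dependency graph: each node lists the nodes it depends on.
    GRAPH = {"a": [], "b": ["a"], "c": ["a"], "d": ["b", "c"]}

    async def run_node(name: str, futures: dict, semaphore: asyncio.Semaphore) -> None:
        # Wait for every dependency to finish before taking a concurrency slot.
        await asyncio.gather(*(futures[dep] for dep in GRAPH[name]))
        async with semaphore:
            print("running", name)
            await asyncio.sleep(0.01)  # stand-in for the task's action

    async def main() -> None:
        semaphore = asyncio.Semaphore(2)  # at most two nodes run at once
        futures: dict[str, asyncio.Task] = {}
        for name in GRAPH:
            futures[name] = asyncio.create_task(run_node(name, futures, semaphore))
        await asyncio.gather(*futures.values())

    asyncio.run(main())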
cook/manager.py CHANGED
@@ -1,12 +1,14 @@
  from __future__ import annotations
+
  import logging
- import networkx as nx
  from pathlib import Path
  from typing import TYPE_CHECKING
+
+ import networkx as nx
+
  from . import task as task_
  from . import util

-
  if TYPE_CHECKING:
  from .actions import Action
  from .contexts import Context
@@ -47,21 +49,21 @@ class Manager:
  raise ValueError("no manager is active")
  return Manager._INSTANCE

- def create_task(self, name: str, **kwargs):
+ def create_task(self, name: str | None = None, **kwargs):
  """
  Create a task. See :func:`.create_task` for details.
  """
  try:
- if name in self.tasks:
- raise ValueError(f"task with name '{name}' already exists")
  task = task_.Task(name, **kwargs)
+ if task.name in self.tasks:
+ raise ValueError(f"task with name '{task.name}' already exists")
  for context in reversed(self.contexts):
  task = context.apply(task)
  if task is None:
  raise ValueError(f"{context} did not return a task")
- self.tasks[name] = task
+ self.tasks[task.name] = task
  return task
- except: # noqa: 722
+ except:
  filename, lineno = util.get_location()
  LOGGER.exception(
  "failed to create task with name '%s' at %s:%d", name, filename, lineno
@@ -135,7 +137,7 @@ class Manager:


  def create_task(
- name: str,
+ name: str | None = None,
  *,
  action: "Action | str | None" = None,
  targets: list["Path | str"] | None = None,
@@ -147,7 +149,8 @@ def create_task(
  Create a new task.

  Args:
- name: Name of the new task.
+ name: Name of the new task. Defaults to the string representation of the first
+ dependency if not provided.
  action: Action to execute or a string for shell commands.
  targets: Paths for files to be generated.
  dependencies: Paths to files on which this task depends.
cook/task.py CHANGED
@@ -1,13 +1,21 @@
  from __future__ import annotations
- import colorama
+
+ import asyncio
+ import inspect
+ import logging
  from pathlib import Path
  from typing import TYPE_CHECKING
- from . import util

+ import colorama
+
+ from . import util

  if TYPE_CHECKING:
- from .util import PathOrStr
  from .actions import Action
+ from .util import PathOrStr
+
+
+ LOGGER = logging.getLogger(__name__)


  class Task:
@@ -17,7 +25,7 @@ class Task:

  def __init__(
  self,
- name: str,
+ name: str | None = None,
  *,
  dependencies: list["PathOrStr | Task"] | None = None,
  targets: list["PathOrStr"] | None = None,
@@ -25,16 +33,31 @@ class Task:
  task_dependencies: list[Task] | None = None,
  location: tuple[str, int] | None = None,
  ) -> None:
- self.name = name
  self.dependencies = dependencies or []
  self.targets = [Path(path) for path in (targets or [])]
+ if name is None:
+ if not self.targets:
+ raise ValueError("'name' is required if there are no targets.")
+ name = str(self.targets[0])
+ self.name = name
  self.action = action
  self.task_dependencies = task_dependencies or []
  self.location = location or util.get_location()

- def execute(self, stop: util.StopEvent | None = None) -> None:
+ async def execute(self) -> None:
  if self.action:
- self.action.execute(self, stop)
+ # Check if the action's execute method is actually async
+ # This handles custom actions that may have implemented sync execute()
+ if inspect.iscoroutinefunction(self.action.execute):
+ await self.action.execute(self)
+ else:
+ # User implemented old-style sync execute() - run in executor with warning
+ LOGGER.warning(
+ f"{self.action.__class__.__name__} implements sync execute(); "
+ "please update to async def execute() for better performance"
+ )
+ loop = asyncio.get_running_loop()
+ await loop.run_in_executor(None, self.action.execute, self)

  def __hash__(self) -> int:
  return hash(self.name)
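Two behavioural points in the new Task are easy to miss: a name is optional when targets are given (the first target's path becomes the name), and an action that still defines a synchronous execute() is detected with inspect.iscoroutinefunction and run in the default executor after a warning. A short sketch of both (the LegacyAction class is illustrative, not part of cook):

    import asyncio
    from cook.actions import Action
    from cook.task import Task

    class LegacyAction(Action):
        def execute(self, task):  # old-style synchronous signature
            print("ran", task.name)

    # The name defaults to the first target when none is given.
    task = Task(targets=["build/output.txt"], action=LegacyAction())
    print(task.name)  # build/output.txt

    # The synchronous execute() is run in an executor, and cook logs a warning
    # suggesting an async implementation instead.
    asyncio.run(task.execute())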
cook/util.py CHANGED
@@ -1,15 +1,14 @@
  from __future__ import annotations
+
  import contextlib
- from datetime import datetime, timedelta
  import hashlib
  import inspect
  import os
+ from datetime import datetime, timedelta
  from pathlib import Path
- import threading
  from time import time
  from typing import TYPE_CHECKING, Generator

-
  if TYPE_CHECKING:
  from .task import Task

@@ -84,16 +83,6 @@ def get_location() -> tuple[Path, int]:
  return Path(frame.f_code.co_filename).resolve(), frame.f_lineno


- class StopEvent(threading.Event):
- """
- Event used for stopping execution with a polling interval.
- """
-
- def __init__(self, interval: float = 1) -> None:
- super().__init__()
- self.interval = interval
-
-
  def format_timedelta(delta: timedelta) -> str:
  """
  Format a time difference.
{cook_build-0.6.4.dist-info → cook_build-0.7.1.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cook-build
- Version: 0.6.4
+ Version: 0.7.1
  Summary: A task-centric build system with simple declarative recipes specified in Python
  Author: Till Hoffmann
  License: BSD-3-Clause
cook_build-0.7.1.dist-info/RECORD ADDED
@@ -0,0 +1,14 @@
+ cook/__init__.py,sha256=SCa9_i6B84IzSAwq0wnSQqvycyL4dvTO7dRIysJXZj4,179
+ cook/__main__.py,sha256=4sO22TsNTt3oirT71dJjZJwmynsRyUGT-CHJNlDgCkk,13242
+ cook/actions.py,sha256=1VLyWL-pACuWpAjIyNWPBeu6wlKkoqi6t3lHr6hV0EQ,7399
+ cook/contexts.py,sha256=AMKO7Uz-nI52OsdPQ_zCLXjOf77V4pgzvDyj6-N8Ods,10705
+ cook/controller.py,sha256=vpF3QhiM3HImaI0rHJ58T8i5AQi5_P4Z2p42qFvG1po,13426
+ cook/manager.py,sha256=Y-QGVw9x8ZpSBMRALJIHgcMmIZulAV9KxwtBJz35Gpw,6241
+ cook/task.py,sha256=-LNMwHdFlTUG45DF_QyWlIHw55_n_u2Xf5beKB7LrdY,2308
+ cook/util.py,sha256=15MMG07CYZZ-YdFE_2jzRRTaqHMsw83UFg0s7e72MhI,2435
+ cook_build-0.7.1.dist-info/licenses/LICENSE,sha256=3Nuj_WTTcz7JDg4-9EzNf6vHlKRWpdLUccg-pvoZ3WE,1500
+ cook_build-0.7.1.dist-info/METADATA,sha256=8CK_rxir23FBjEo5H3IhxB1tkzXlMrolpAuhwqYLP4g,4586
+ cook_build-0.7.1.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
+ cook_build-0.7.1.dist-info/entry_points.txt,sha256=5UP0ZmmxSNKevTVISUJxmdXEQsKrI4n54OQYkjrdX2c,48
+ cook_build-0.7.1.dist-info/top_level.txt,sha256=ewNQIn2oRSYV98vAsUnw88u2Q8XHKhAz70ed2PEdR2c,5
+ cook_build-0.7.1.dist-info/RECORD,,
{cook_build-0.6.4.dist-info → cook_build-0.7.1.dist-info}/WHEEL RENAMED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (80.10.1)
  Root-Is-Purelib: true
  Tag: py3-none-any

cook_build-0.6.4.dist-info/RECORD DELETED
@@ -1,14 +0,0 @@
- cook/__init__.py,sha256=uNRvyxT6-XuoAMdDujWYa_4ZW0ppAiom6tsxMovZ-A0,180
- cook/__main__.py,sha256=s0OlqS5mjynGclvczksty2Cf8JgOGAdlBDmf2-2_D4g,13192
- cook/actions.py,sha256=erjjDKC45kQpv_Qb2V3OCGvrrphka4Ytj78RPEvs8Uc,6718
- cook/contexts.py,sha256=e3bddOaR8OLhPh38-U5b-u9D83NXlbQU0Hn7UvYaITI,10717
- cook/controller.py,sha256=g8UZS0lSGowO_lYXuF7BLxsAsmSCVtWNsJnWzLBCHdc,17376
- cook/manager.py,sha256=FAy8CKIMOx5liYNnaDTgnC5YZm1aqWy3pCIGiP8Hv2w,6118
- cook/task.py,sha256=ioPuQ8jp09BY9VhKn2cC6Q8G17W-1e4q4kj2GD4Zs8k,1388
- cook/util.py,sha256=hpMxxHnmJRfCUUAzkwMQU4kF_JAcEBFVpvPsHxOOY2U,2681
- cook_build-0.6.4.dist-info/licenses/LICENSE,sha256=3Nuj_WTTcz7JDg4-9EzNf6vHlKRWpdLUccg-pvoZ3WE,1500
- cook_build-0.6.4.dist-info/METADATA,sha256=_BpZNGF-uitiE4kVGjYnwbmyPMoY8KNy0djiXQNrl5I,4586
- cook_build-0.6.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- cook_build-0.6.4.dist-info/entry_points.txt,sha256=5UP0ZmmxSNKevTVISUJxmdXEQsKrI4n54OQYkjrdX2c,48
- cook_build-0.6.4.dist-info/top_level.txt,sha256=ewNQIn2oRSYV98vAsUnw88u2Q8XHKhAz70ed2PEdR2c,5
- cook_build-0.6.4.dist-info/RECORD,,