tasktree 0.0.8-py3-none-any.whl → 0.0.10-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
tasktree/cli.py CHANGED
@@ -26,6 +26,41 @@ app = typer.Typer(
 console = Console()
 
 
+def _format_task_arguments(arg_specs: list[str | dict]) -> str:
+    """Format task arguments for display in list output.
+
+    Args:
+        arg_specs: List of argument specifications from task definition (strings or dicts)
+
+    Returns:
+        Formatted string showing arguments with types and defaults
+
+    Examples:
+        ["mode", "target"] -> "mode:str target:str"
+        ["mode=debug", "target=x86_64"] -> "mode:str [=debug] target:str [=x86_64]"
+        ["port:int", "debug:bool=false"] -> "port:int debug:bool [=false]"
+        [{"timeout": {"type": "int", "default": 30}}] -> "timeout:int [=30]"
+    """
+    if not arg_specs:
+        return ""
+
+    formatted_parts = []
+    for spec_str in arg_specs:
+        parsed = parse_arg_spec(spec_str)
+
+        # Format: name:type or name:type [=default]
+        # Argument names in normal intensity, types and defaults in dim
+        arg_part = f"{parsed.name}[dim]:{parsed.arg_type}[/dim]"
+
+        if parsed.default is not None:
+            # Use dim styling for the default value part
+            arg_part += f" [dim]\\[={parsed.default}][/dim]"
+
+        formatted_parts.append(arg_part)
+
+    return " ".join(formatted_parts)
+
+
 def _list_tasks(tasks_file: Optional[str] = None):
     """List all available tasks with descriptions."""
     recipe = _get_recipe(tasks_file)
@@ -33,14 +68,27 @@ def _list_tasks(tasks_file: Optional[str] = None):
         console.print("[red]No recipe file found (tasktree.yaml, tasktree.yml, tt.yaml, or *.tasks)[/red]")
         raise typer.Exit(1)
 
-    table = Table(title="Available Tasks")
-    table.add_column("Task", style="cyan", no_wrap=True)
-    table.add_column("Description", style="white")
+    # Calculate maximum task name length for fixed-width column
+    max_task_name_len = max(len(name) for name in recipe.task_names()) if recipe.task_names() else 0
+
+    # Create borderless table with three columns
+    table = Table(show_edge=False, show_header=False, box=None, padding=(0, 2))
+
+    # Command column: fixed width to accommodate longest task name
+    table.add_column("Command", style="bold cyan", no_wrap=True, width=max_task_name_len)
+
+    # Arguments column: allow wrapping with sensible max width
+    table.add_column("Arguments", style="white", max_width=60)
+
+    # Description column: allow wrapping with sensible max width
+    table.add_column("Description", style="white", max_width=80)
 
     for task_name in sorted(recipe.task_names()):
         task = recipe.get_task(task_name)
         desc = task.desc if task else ""
-        table.add_row(task_name, desc)
+        args_formatted = _format_task_arguments(task.args) if task else ""
+
+        table.add_row(task_name, args_formatted, desc)
 
     console.print(table)
 
@@ -351,9 +399,9 @@ def _execute_dynamic_task(args: list[str], force: bool = False, only: bool = Fal
     state.load()
     executor = Executor(recipe, state)
 
-    # Prune state before execution (compute hashes with effective environment)
+    # Prune state before execution (compute hashes with effective environment and dependencies)
    valid_hashes = {
-        hash_task(t.cmd, t.outputs, t.working_dir, t.args, executor._get_effective_env_name(t))
+        hash_task(t.cmd, t.outputs, t.working_dir, t.args, executor._get_effective_env_name(t), t.deps)
        for t in recipe.tasks.values()
    }
    state.prune(valid_hashes)
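
To illustrate the `tt --list` change above, here is a standalone sketch of a borderless three-column Rich table with dimmed type/default annotations, using the same table settings as the new code; the task names and pre-formatted argument strings are invented for the example:

```python
# Illustrative sketch (not from the package): rendering a borderless
# three-column Rich table like the new task listing. Sample data is made up.
from rich.console import Console
from rich.table import Table

console = Console()

# task name -> (pre-formatted argument string, description)
tasks = {
    "build": ("mode[dim]:str[/dim] [dim]\\[=debug][/dim]", "Compile the project"),
    "test": ("", "Run the test suite"),
}

max_name_len = max(len(name) for name in tasks)

table = Table(show_edge=False, show_header=False, box=None, padding=(0, 2))
table.add_column("Command", style="bold cyan", no_wrap=True, width=max_name_len)
table.add_column("Arguments", style="white", max_width=60)
table.add_column("Description", style="white", max_width=80)

for name, (args_formatted, desc) in sorted(tasks.items()):
    table.add_row(name, args_formatted, desc)

console.print(table)
```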
tasktree/docker.py CHANGED
@@ -87,16 +87,24 @@ class DockerManager:
 
         # Build the image
         try:
+            docker_build_cmd = [
+                "docker",
+                "build",
+                "-t",
+                image_tag,
+                "-f",
+                str(dockerfile_path),
+            ]
+
+            # Add build args if environment has them (docker environments use dict for args)
+            if isinstance(env.args, dict):
+                for arg_name, arg_value in env.args.items():
+                    docker_build_cmd.extend(["--build-arg", f"{arg_name}={arg_value}"])
+
+            docker_build_cmd.append(str(context_path))
+
             subprocess.run(
-                [
-                    "docker",
-                    "build",
-                    "-t",
-                    image_tag,
-                    "-f",
-                    str(dockerfile_path),
-                    str(context_path),
-                ],
+                docker_build_cmd,
                check=True,
                capture_output=False, # Show build output to user
            )
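
For reference, a minimal sketch of the `--build-arg` handling introduced above, written as a pure function so the resulting command list can be inspected without invoking Docker; the image tag, paths, and build args are hypothetical:

```python
# Illustrative sketch (not part of the package): mapping a dict of build args
# to Docker's `--build-arg NAME=VALUE` flags before the context path is added.
from pathlib import Path


def build_docker_command(image_tag: str, dockerfile: Path, context: Path,
                         build_args: dict[str, str] | None = None) -> list[str]:
    cmd = ["docker", "build", "-t", image_tag, "-f", str(dockerfile)]
    # One --build-arg flag per key=value pair, matching Docker's CLI syntax
    for name, value in (build_args or {}).items():
        cmd.extend(["--build-arg", f"{name}={value}"])
    cmd.append(str(context))
    return cmd


print(build_docker_command(
    "tasktree-env:latest",       # hypothetical image tag
    Path("Dockerfile"),
    Path("."),
    {"PYTHON_VERSION": "3.12"},  # hypothetical build args
))
```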
tasktree/executor.py CHANGED
@@ -64,6 +64,62 @@ class Executor:
         self.state = state_manager
         self.docker_manager = docker_module.DockerManager(recipe.project_root)
 
+    def _has_regular_args(self, task: Task) -> bool:
+        """Check if a task has any regular (non-exported) arguments.
+
+        Args:
+            task: Task to check
+
+        Returns:
+            True if task has at least one regular (non-exported) argument, False otherwise
+        """
+        if not task.args:
+            return False
+
+        # Check if any arg is not exported (doesn't start with $)
+        for arg_spec in task.args:
+            # Handle both string and dict arg specs
+            if isinstance(arg_spec, str):
+                # Remove default value part if present
+                arg_name = arg_spec.split('=')[0].split(':')[0].strip()
+                if not arg_name.startswith('$'):
+                    return True
+            elif isinstance(arg_spec, dict):
+                # Dict format: { argname: { ... } } or { $argname: { ... } }
+                for key in arg_spec.keys():
+                    if not key.startswith('$'):
+                        return True
+
+        return False
+
+    def _filter_regular_args(self, task: Task, task_args: dict[str, Any]) -> dict[str, Any]:
+        """Filter task_args to only include regular (non-exported) arguments.
+
+        Args:
+            task: Task definition
+            task_args: Dictionary of all task arguments
+
+        Returns:
+            Dictionary containing only regular (non-exported) arguments
+        """
+        if not task.args or not task_args:
+            return {}
+
+        # Build set of exported arg names (without the $ prefix)
+        exported_names = set()
+        for arg_spec in task.args:
+            if isinstance(arg_spec, str):
+                arg_name = arg_spec.split('=')[0].split(':')[0].strip()
+                if arg_name.startswith('$'):
+                    exported_names.add(arg_name[1:]) # Remove $ prefix
+            elif isinstance(arg_spec, dict):
+                for key in arg_spec.keys():
+                    if key.startswith('$'):
+                        exported_names.add(key[1:]) # Remove $ prefix
+
+        # Filter out exported args
+        return {k: v for k, v in task_args.items() if k not in exported_names}
+
     def _collect_early_builtin_variables(self, task: Task, timestamp: datetime) -> dict[str, str]:
         """Collect built-in variables that don't depend on working_dir.
 
@@ -277,9 +333,9 @@ class Executor:
                 reason="forced",
             )
 
-        # Compute hashes (include effective environment)
+        # Compute hashes (include effective environment and dependencies)
         effective_env = self._get_effective_env_name(task)
-        task_hash = hash_task(task.cmd, task.outputs, task.working_dir, task.args, effective_env)
+        task_hash = hash_task(task.cmd, task.outputs, task.working_dir, task.args, effective_env, task.deps)
        args_hash = hash_args(args_dict) if args_dict else None
        cache_key = make_cache_key(task_hash, args_hash)
 
@@ -372,22 +428,39 @@ class Executor:
         # Resolve execution order
         if only:
             # Only execute the target task, skip dependencies
-            execution_order = [task_name]
+            execution_order = [(task_name, args_dict)]
         else:
             # Execute task and all dependencies
-            execution_order = resolve_execution_order(self.recipe, task_name)
+            execution_order = resolve_execution_order(self.recipe, task_name, args_dict)
 
         # Single phase: Check and execute incrementally
         statuses: dict[str, TaskStatus] = {}
-        for name in execution_order:
+        for name, task_args in execution_order:
             task = self.recipe.tasks[name]
 
-            # Determine task-specific args (only for target task)
-            task_args = args_dict if name == task_name else {}
+            # Convert None to {} for internal use (None is used to distinguish simple deps in graph)
+            args_dict_for_execution = task_args if task_args is not None else {}
 
             # Check if task needs to run (based on CURRENT filesystem state)
-            status = self.check_task_status(task, task_args, force=force)
-            statuses[name] = status
+            status = self.check_task_status(task, args_dict_for_execution, force=force)
+
+            # Use a key that includes args for status tracking
+            # Only include regular (non-exported) args in status key for parameterized dependencies
+            # For the root task (invoked from CLI), status key is always just the task name
+            # For dependencies with parameterized invocations, include the regular args
+            is_root_task = (name == task_name)
+            if not is_root_task and args_dict_for_execution and self._has_regular_args(task):
+                import json
+                # Filter to only include regular (non-exported) args
+                regular_args = self._filter_regular_args(task, args_dict_for_execution)
+                if regular_args:
+                    args_str = json.dumps(regular_args, sort_keys=True, separators=(",", ":"))
+                    status_key = f"{name}({args_str})"
+                else:
+                    status_key = name
+            else:
+                status_key = name
+            statuses[status_key] = status
 
             # Execute immediately if needed
             if status.will_run:
@@ -399,7 +472,7 @@ class Executor:
                     file=sys.stderr,
                 )
 
-                self._run_task(task, task_args)
+                self._run_task(task, args_dict_for_execution)
 
         return statuses
 
@@ -962,9 +1035,9 @@ class Executor:
             task: Task that was executed
             args_dict: Arguments used for execution
         """
-        # Compute hashes (include effective environment)
+        # Compute hashes (include effective environment and dependencies)
         effective_env = self._get_effective_env_name(task)
-        task_hash = hash_task(task.cmd, task.outputs, task.working_dir, task.args, effective_env)
+        task_hash = hash_task(task.cmd, task.outputs, task.working_dir, task.args, effective_env, task.deps)
        args_hash = hash_args(args_dict) if args_dict else None
        cache_key = make_cache_key(task_hash, args_hash)
 
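
To make the status-key behaviour above concrete, here is a simplified, self-contained sketch (not the executor's actual API) that derives a per-invocation key from the regular, non-exported arguments; the arg specs and values are made up:

```python
# Illustrative sketch: status keys for parameterized dependency invocations.
# Exported args (prefixed with $) are excluded from the key, mirroring the
# _has_regular_args / _filter_regular_args logic in the diff above.
import json


def strip_spec(spec: str) -> str:
    """Reduce an arg spec like 'port:int=8080' or '$token' to its bare name."""
    return spec.split("=")[0].split(":")[0].strip()


def status_key(task_name: str, arg_specs: list[str], values: dict[str, object]) -> str:
    exported = {strip_spec(s)[1:] for s in arg_specs if strip_spec(s).startswith("$")}
    regular = {k: v for k, v in values.items() if k not in exported}
    if not regular:
        return task_name
    return f"{task_name}({json.dumps(regular, sort_keys=True, separators=(',', ':'))})"


print(status_key("process", ["mode", "verbose=false", "$token"],
                 {"mode": "debug", "verbose": True, "token": "abc"}))
# -> process({"mode":"debug","verbose":true})
```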
tasktree/graph.py CHANGED
@@ -2,8 +2,10 @@
 
 from graphlib import TopologicalSorter
 from pathlib import Path
+from typing import Any
 
-from tasktree.parser import Recipe, Task
+from tasktree.hasher import hash_args
+from tasktree.parser import Recipe, Task, DependencyInvocation, parse_dependency_spec
 
 
 class CycleError(Exception):
@@ -18,15 +20,61 @@ class TaskNotFoundError(Exception):
     pass
 
 
-def resolve_execution_order(recipe: Recipe, target_task: str) -> list[str]:
+class TaskNode:
+    """Represents a node in the dependency graph (task + arguments).
+
+    Each node represents a unique invocation of a task with specific arguments.
+    Tasks invoked with different arguments are considered different nodes.
+    """
+
+    def __init__(self, task_name: str, args: dict[str, Any] | None = None):
+        self.task_name = task_name
+        self.args = args # Keep None as None
+
+    def __hash__(self):
+        """Hash based on task name and sorted args."""
+        # Treat None and {} as equivalent for hashing
+        if not self.args:
+            return hash(self.task_name)
+        args_hash = hash_args(self.args)
+        return hash((self.task_name, args_hash))
+
+    def __eq__(self, other):
+        """Equality based on task name and args."""
+        if not isinstance(other, TaskNode):
+            return False
+        # Treat None and {} as equivalent
+        self_args = self.args if self.args else {}
+        other_args = other.args if other.args else {}
+        return self.task_name == other.task_name and self_args == other_args
+
+    def __repr__(self):
+        if not self.args:
+            return f"TaskNode({self.task_name})"
+        args_str = ", ".join(f"{k}={v}" for k, v in sorted(self.args.items()))
+        return f"TaskNode({self.task_name}, {{{args_str}}})"
+
+    def __str__(self):
+        if not self.args:
+            return self.task_name
+        args_str = ", ".join(f"{k}={v}" for k, v in sorted(self.args.items()))
+        return f"{self.task_name}({args_str})"
+
+
+def resolve_execution_order(
+    recipe: Recipe,
+    target_task: str,
+    target_args: dict[str, Any] | None = None
+) -> list[tuple[str, dict[str, Any]]]:
     """Resolve execution order for a task and its dependencies.
 
     Args:
         recipe: Parsed recipe containing all tasks
         target_task: Name of the task to execute
+        target_args: Arguments for the target task (optional)
 
     Returns:
-        List of task names in execution order (dependencies first)
+        List of (task_name, args_dict) tuples in execution order (dependencies first)
 
    Raises:
        TaskNotFoundError: If target task or any dependency doesn't exist
@@ -35,33 +83,61 @@ def resolve_execution_order(recipe: Recipe, target_task: str) -> list[str]:
     if target_task not in recipe.tasks:
         raise TaskNotFoundError(f"Task not found: {target_task}")
 
-    # Build dependency graph
-    graph: dict[str, set[str]] = {}
+    # Build dependency graph using TaskNode objects
+    graph: dict[TaskNode, set[TaskNode]] = {}
+
+    # Track seen nodes to detect duplicates
+    seen_invocations: dict[tuple[str, str], TaskNode] = {} # (task_name, args_hash) -> node
 
-    def build_graph(task_name: str) -> None:
+    def get_or_create_node(task_name: str, args: dict[str, Any] | None) -> TaskNode:
+        """Get existing node or create new one for this invocation."""
+        args_hash = hash_args(args) if args else ""
+        key = (task_name, args_hash)
+
+        if key not in seen_invocations:
+            seen_invocations[key] = TaskNode(task_name, args)
+        return seen_invocations[key]
+
+    def build_graph(node: TaskNode) -> None:
         """Recursively build dependency graph."""
-        if task_name in graph:
+        if node in graph:
             # Already processed
             return
 
-        task = recipe.tasks.get(task_name)
+        task = recipe.tasks.get(node.task_name)
        if task is None:
-            raise TaskNotFoundError(f"Task not found: {task_name}")
+            raise TaskNotFoundError(f"Task not found: {node.task_name}")
 
-        # Add task to graph with its dependencies
-        graph[task_name] = set(task.deps)
+        # Parse and normalize dependencies
+        dep_nodes = set()
+        for dep_spec in task.deps:
+            # Parse dependency specification
+            dep_inv = parse_dependency_spec(dep_spec, recipe)
+
+            # Create or get node for this dependency invocation
+            dep_node = get_or_create_node(dep_inv.task_name, dep_inv.args)
+            dep_nodes.add(dep_node)
+
+        # Add task to graph with its dependency nodes
+        graph[node] = dep_nodes
 
        # Recursively process dependencies
-        for dep in task.deps:
-            build_graph(dep)
+        for dep_node in dep_nodes:
+            build_graph(dep_node)
+
+    # Create root node for target task
+    root_node = get_or_create_node(target_task, target_args)
 
     # Build graph starting from target task
-    build_graph(target_task)
+    build_graph(root_node)
 
     # Use TopologicalSorter to resolve execution order
    try:
        sorter = TopologicalSorter(graph)
-        return list(sorter.static_order())
+        ordered_nodes = list(sorter.static_order())
+
+        # Convert TaskNode objects to (task_name, args_dict) tuples
+        return [(node.task_name, node.args) for node in ordered_nodes]
    except ValueError as e:
        raise CycleError(f"Dependency cycle detected: {e}")
 
@@ -87,8 +163,10 @@ def get_implicit_inputs(recipe: Recipe, task: Task) -> list[str]:
     implicit_inputs = []
 
     # Inherit from dependencies
-    for dep_name in task.deps:
-        dep_task = recipe.tasks.get(dep_name)
+    for dep_spec in task.deps:
+        # Parse dependency to get task name (ignore args for input inheritance)
+        dep_inv = parse_dependency_spec(dep_spec, recipe)
+        dep_task = recipe.tasks.get(dep_inv.task_name)
        if dep_task is None:
            continue
 
@@ -125,7 +203,7 @@ def get_implicit_inputs(recipe: Recipe, task: Task) -> list[str]:
     return implicit_inputs
 
 
-def build_dependency_tree(recipe: Recipe, target_task: str) -> dict:
+def build_dependency_tree(recipe: Recipe, target_task: str, target_args: dict[str, Any] | None = None) -> dict:
     """Build a tree structure representing dependencies for visualization.
 
     Note: This builds a true tree representation where shared dependencies may
@@ -135,6 +213,7 @@ def build_dependency_tree(recipe: Recipe, target_task: str) -> dict:
     Args:
         recipe: Parsed recipe containing all tasks
         target_task: Name of the task to build tree for
+        target_args: Arguments for the target task (optional)
 
     Returns:
         Nested dictionary representing the dependency tree
@@ -144,25 +223,44 @@ def build_dependency_tree(recipe: Recipe, target_task: str) -> dict:
 
     current_path = set() # Track current recursion path for cycle detection
 
-    def build_tree(task_name: str) -> dict:
+    def build_tree(task_name: str, args: dict[str, Any] | None) -> dict:
         """Recursively build dependency tree."""
         task = recipe.tasks.get(task_name)
         if task is None:
             raise TaskNotFoundError(f"Task not found: {task_name}")
 
+        # Create node identifier for cycle detection
+        from tasktree.hasher import hash_args
+        args_dict = args or {}
+        node_id = (task_name, hash_args(args_dict) if args_dict else "")
+
         # Detect cycles in current recursion path
-        if task_name in current_path:
-            return {"name": task_name, "deps": [], "cycle": True}
+        if node_id in current_path:
+            display_name = task_name if not args_dict else f"{task_name}({', '.join(f'{k}={v}' for k, v in sorted(args_dict.items()))})"
+            return {"name": display_name, "deps": [], "cycle": True}
+
+        current_path.add(node_id)
+
+        # Parse dependencies
+        dep_trees = []
+        for dep_spec in task.deps:
+            dep_inv = parse_dependency_spec(dep_spec, recipe)
+            dep_tree = build_tree(dep_inv.task_name, dep_inv.args)
+            dep_trees.append(dep_tree)
 
-        current_path.add(task_name)
+        # Create display name (include args if present)
+        display_name = task_name
+        if args_dict:
+            args_str = ", ".join(f"{k}={v}" for k, v in sorted(args_dict.items()))
+            display_name = f"{task_name}({args_str})"
 
        tree = {
-            "name": task_name,
-            "deps": [build_tree(dep) for dep in task.deps],
+            "name": display_name,
+            "deps": dep_trees,
        }
 
-        current_path.remove(task_name)
+        current_path.remove(node_id)
 
        return tree
 
-    return build_tree(target_task)
+    return build_tree(target_task, target_args)
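
A small sketch of the idea behind the TaskNode-based graph: parameterized invocations become distinct nodes that `graphlib` orders dependencies-first. The node encoding and the tiny recipe below are illustrative, not the package's implementation:

```python
# Illustrative sketch: (task name, frozen args) pairs as graph nodes, so the
# same task invoked with different arguments is scheduled more than once.
from graphlib import TopologicalSorter

Node = tuple[str, frozenset]  # (task name, frozenset of (arg, value) pairs)


def node(name: str, **args) -> Node:
    return (name, frozenset(args.items()))


# "dist" depends on two different invocations of "compile"
graph: dict[Node, set[Node]] = {
    node("dist"): {node("compile", target="x86_64"), node("compile", target="aarch64")},
    node("compile", target="x86_64"): set(),
    node("compile", target="aarch64"): set(),
}

for name, args in TopologicalSorter(graph).static_order():
    print(name, dict(args))
# Both compile invocations print before dist (their relative order is unspecified).
```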
tasktree/hasher.py CHANGED
@@ -37,7 +37,27 @@ def _normalize_choices_lists(args: list[str | dict[str, Any]]) -> list[str | di
     return normalized_args
 
 
-def hash_task(cmd: str, outputs: list[str], working_dir: str, args: list[str | dict[str, Any]], env: str = "") -> str:
+def hash_task(
+    cmd: str,
+    outputs: list[str],
+    working_dir: str,
+    args: list[str | dict[str, Any]],
+    env: str = "",
+    deps: list[str | dict[str, Any]] | None = None
+) -> str:
+    """Hash task definition including dependencies.
+
+    Args:
+        cmd: Task command
+        outputs: Task outputs
+        working_dir: Working directory
+        args: Task argument specifications
+        env: Environment name
+        deps: Dependency specifications (optional, for dependency hash)
+
+    Returns:
+        8-character hash of task definition
+    """
     data = {
        "cmd": cmd,
        "outputs": sorted(outputs),
@@ -46,6 +66,23 @@ def hash_task(cmd: str, outputs: list[str], working_dir: str, args: list[str | d
        "env": env,
     }
 
+    # Include dependency invocation signatures if provided
+    if deps is not None:
+        # Normalize deps for hashing using JSON serialization for consistency
+        normalized_deps = []
+        for dep in deps:
+            if isinstance(dep, str):
+                # Simple string dependency
+                normalized_deps.append(dep)
+            elif isinstance(dep, dict):
+                # Dict dependency with args - normalize to canonical form
+                # Sort the dict to ensure consistent hashing
+                normalized_deps.append(dict(sorted(dep.items())))
+            else:
+                normalized_deps.append(dep)
+        # Sort using JSON serialization for consistent ordering
+        data["deps"] = sorted(normalized_deps, key=lambda x: json.dumps(x, sort_keys=True) if isinstance(x, dict) else x)
+
     serialized = json.dumps(data, sort_keys=True, separators=(",", ":"))
     return hashlib.sha256(serialized.encode()).hexdigest()[:8]
 
@@ -67,9 +104,16 @@ def hash_environment_definition(env) -> str:
     # Import inside function to avoid circular dependency
     from tasktree.parser import Environment
 
+    # Handle args - can be list (shell args) or dict (docker build args)
+    args_value = env.args
+    if isinstance(env.args, dict):
+        args_value = dict(sorted(env.args.items())) # Sort dict for determinism
+    elif isinstance(env.args, list):
+        args_value = sorted(env.args) # Sort list for determinism
+
     data = {
         "shell": env.shell,
-        "args": sorted(env.args), # Sort for determinism
+        "args": args_value,
        "preamble": env.preamble,
        "dockerfile": env.dockerfile,
        "context": env.context,
tasktree/parser.py CHANGED
@@ -31,7 +31,7 @@ class Environment:
 
     name: str
     shell: str = "" # Path to shell (required for shell envs, optional for Docker)
-    args: list[str] = field(default_factory=list)
+    args: list[str] | dict[str, str] = field(default_factory=list) # Shell args (list) or Docker build args (dict)
     preamble: str = ""
     # Docker-specific fields (presence of dockerfile indicates Docker environment)
     dockerfile: str = "" # Path to Dockerfile
@@ -44,7 +44,7 @@ class Environment:
     run_as_root: bool = False # If True, skip user mapping (run as root in container)
 
     def __post_init__(self):
-        """Ensure args is always a list."""
+        """Ensure args is in the correct format."""
        if isinstance(self.args, str):
            self.args = [self.args]
 
@@ -56,11 +56,11 @@ class Task:
     name: str
     cmd: str
     desc: str = ""
-    deps: list[str] = field(default_factory=list)
+    deps: list[str | dict[str, Any]] = field(default_factory=list) # Can be strings or dicts with args
     inputs: list[str] = field(default_factory=list)
     outputs: list[str] = field(default_factory=list)
     working_dir: str = ""
-    args: list[str] = field(default_factory=list)
+    args: list[str | dict[str, Any]] = field(default_factory=list) # Can be strings or dicts (each dict has single key: arg name)
     source_file: str = "" # Track which file defined this task
     env: str = "" # Environment name to use for execution
 
@@ -75,6 +75,38 @@ class Task:
         if isinstance(self.args, str):
             self.args = [self.args]
 
+        # Validate args is not a dict (common YAML mistake)
+        if isinstance(self.args, dict):
+            raise ValueError(
+                f"Task '{self.name}' has invalid 'args' syntax.\n\n"
+                f"Found dictionary syntax (without dashes):\n"
+                f" args:\n"
+                f" {list(self.args.keys())[0] if self.args else 'key'}: ...\n\n"
+                f"Correct syntax uses list format (with dashes):\n"
+                f" args:\n"
+                f" - {list(self.args.keys())[0] if self.args else 'key'}: ...\n\n"
+                f"Arguments must be defined as a list, not a dictionary."
+            )
+
+
+@dataclass
+class DependencyInvocation:
+    """Represents a task dependency invocation with optional arguments.
+
+    Attributes:
+        task_name: Name of the dependency task
+        args: Dictionary of argument names to values (None if no args specified)
+    """
+    task_name: str
+    args: dict[str, Any] | None = None
+
+    def __str__(self) -> str:
+        """String representation for display."""
+        if not self.args:
+            return self.task_name
+        args_str = ", ".join(f"{k}={v}" for k, v in self.args.items())
+        return f"{self.task_name}({args_str})"
+
 
 @dataclass
 class ArgSpec:
@@ -731,10 +763,16 @@ def _resolve_variable_value(
     # Validate and stringify the value
     string_value = validator.convert(raw_value, None, None)
 
+    # Convert to string (lowercase for booleans to match YAML/shell conventions)
+    if isinstance(string_value, bool):
+        string_value_str = str(string_value).lower()
+    else:
+        string_value_str = str(string_value)
+
     # Substitute any {{ var.name }} references in the string value
     from tasktree.substitution import substitute_variables
    try:
-        resolved_value = substitute_variables(str(string_value), resolved)
+        resolved_value = substitute_variables(string_value_str, resolved)
    except ValueError as e:
        # Check if the undefined variable is in the resolution stack (circular reference)
        error_msg = str(e)
@@ -1117,18 +1155,40 @@ def _parse_file(
         # 2. It starts with a local import namespace (like "base.setup" when "base" is imported)
         rewritten_deps = []
         for dep in deps:
-            if "." not in dep:
-                # Simple name - always prefix
-                rewritten_deps.append(f"{namespace}.{dep}")
-            else:
-                # Check if it starts with a local import namespace
-                dep_root = dep.split(".", 1)[0]
-                if dep_root in local_import_namespaces:
-                    # Local import reference - prefix it
+            if isinstance(dep, str):
+                # Simple string dependency
+                if "." not in dep:
+                    # Simple name - always prefix
                    rewritten_deps.append(f"{namespace}.{dep}")
                else:
-                    # External reference - keep as-is
-                    rewritten_deps.append(dep)
+                    # Check if it starts with a local import namespace
+                    dep_root = dep.split(".", 1)[0]
+                    if dep_root in local_import_namespaces:
+                        # Local import reference - prefix it
+                        rewritten_deps.append(f"{namespace}.{dep}")
+                    else:
+                        # External reference - keep as-is
+                        rewritten_deps.append(dep)
+            elif isinstance(dep, dict):
+                # Dict dependency with args - rewrite the task name key
+                rewritten_dep = {}
+                for task_name, args in dep.items():
+                    if "." not in task_name:
+                        # Simple name - prefix it
+                        rewritten_dep[f"{namespace}.{task_name}"] = args
+                    else:
+                        # Check if it starts with a local import namespace
+                        dep_root = task_name.split(".", 1)[0]
+                        if dep_root in local_import_namespaces:
+                            # Local import reference - prefix it
+                            rewritten_dep[f"{namespace}.{task_name}"] = args
+                        else:
+                            # External reference - keep as-is
+                            rewritten_dep[task_name] = args
+                rewritten_deps.append(rewritten_dep)
+            else:
+                # Unknown type - keep as-is
+                rewritten_deps.append(dep)
         deps = rewritten_deps
 
     task = Task(
@@ -1539,3 +1599,208 @@ def _parse_arg_dict(arg_name: str, config: dict, is_exported: bool) -> ArgSpec:
         max_val=max_val,
         choices=choices
     )
+
+
+def parse_dependency_spec(dep_spec: str | dict[str, Any], recipe: Recipe) -> DependencyInvocation:
+    """Parse a dependency specification into a DependencyInvocation.
+
+    Supports three forms:
+    1. Simple string: "task_name" -> DependencyInvocation(task_name, None)
+    2. Positional args: {"task_name": [arg1, arg2]} -> DependencyInvocation(task_name, {name1: arg1, name2: arg2})
+    3. Named args: {"task_name": {arg1: val1}} -> DependencyInvocation(task_name, {arg1: val1})
+
+    Args:
+        dep_spec: Dependency specification (string or dict)
+        recipe: Recipe containing task definitions (for arg normalization)
+
+    Returns:
+        DependencyInvocation object with normalized args
+
+    Raises:
+        ValueError: If dependency specification is invalid
+    """
+    # Simple string case
+    if isinstance(dep_spec, str):
+        return DependencyInvocation(task_name=dep_spec, args=None)
+
+    # Dictionary case
+    if not isinstance(dep_spec, dict):
+        raise ValueError(
+            f"Dependency must be a string or dictionary, got: {type(dep_spec).__name__}"
+        )
+
+    # Validate dict has exactly one key
+    if len(dep_spec) != 1:
+        raise ValueError(
+            f"Dependency dictionary must have exactly one key (the task name), got: {list(dep_spec.keys())}"
+        )
+
+    task_name, arg_spec = next(iter(dep_spec.items()))
+
+    # Validate task name
+    if not isinstance(task_name, str) or not task_name:
+        raise ValueError(
+            f"Dependency task name must be a non-empty string, got: {task_name!r}"
+        )
+
+    # Check for empty list (explicitly disallowed)
+    if isinstance(arg_spec, list) and len(arg_spec) == 0:
+        raise ValueError(
+            f"Empty argument list for dependency '{task_name}' is not allowed.\n"
+            f"Use simple string form instead: '{task_name}'"
+        )
+
+    # Positional args (list)
+    if isinstance(arg_spec, list):
+        return _parse_positional_dependency_args(task_name, arg_spec, recipe)
+
+    # Named args (dict)
+    if isinstance(arg_spec, dict):
+        return _parse_named_dependency_args(task_name, arg_spec, recipe)
+
+    # Invalid type
+    raise ValueError(
+        f"Dependency arguments for '{task_name}' must be a list (positional) or dict (named), "
+        f"got: {type(arg_spec).__name__}"
+    )
+
+
+def _get_validated_task(task_name: str, recipe: Recipe) -> Task:
+    """Get and validate that a task exists in the recipe.
+
+    Args:
+        task_name: Name of the task to retrieve
+        recipe: Recipe containing task definitions
+
+    Returns:
+        The validated Task object
+
+    Raises:
+        ValueError: If task is not found
+    """
+    task = recipe.get_task(task_name)
+    if task is None:
+        raise ValueError(f"Dependency task not found: {task_name}")
+    return task
+
+
+def _parse_positional_dependency_args(
+    task_name: str, args_list: list[Any], recipe: Recipe
+) -> DependencyInvocation:
+    """Parse positional dependency arguments.
+
+    Args:
+        task_name: Name of the dependency task
+        args_list: List of positional argument values
+        recipe: Recipe containing task definitions
+
+    Returns:
+        DependencyInvocation with normalized named args
+
+    Raises:
+        ValueError: If validation fails
+    """
+    # Get the task to validate against
+    task = _get_validated_task(task_name, recipe)
+
+    # Parse task's arg specs
+    if not task.args:
+        raise ValueError(
+            f"Task '{task_name}' takes no arguments, but {len(args_list)} were provided"
+        )
+
+    parsed_specs = [parse_arg_spec(spec) for spec in task.args]
+
+    # Check positional count doesn't exceed task's arg count
+    if len(args_list) > len(parsed_specs):
+        raise ValueError(
+            f"Task '{task_name}' takes {len(parsed_specs)} arguments, got {len(args_list)}"
+        )
+
+    # Map positional args to names with type conversion
+    args_dict = {}
+    for i, value in enumerate(args_list):
+        spec = parsed_specs[i]
+        if isinstance(value, str):
+            # Convert string values using type validator
+            click_type = get_click_type(spec.arg_type, min_val=spec.min_val, max_val=spec.max_val)
+            args_dict[spec.name] = click_type.convert(value, None, None)
+        else:
+            # Value is already typed (e.g., bool, int from YAML)
+            args_dict[spec.name] = value
+
+    # Fill in defaults for remaining args
+    for i in range(len(args_list), len(parsed_specs)):
+        spec = parsed_specs[i]
+        if spec.default is not None:
+            # Defaults in task specs are always strings, convert them
+            click_type = get_click_type(spec.arg_type, min_val=spec.min_val, max_val=spec.max_val)
+            args_dict[spec.name] = click_type.convert(spec.default, None, None)
+        else:
+            raise ValueError(
+                f"Task '{task_name}' requires argument '{spec.name}' (no default provided)"
+            )
+
+    return DependencyInvocation(task_name=task_name, args=args_dict)
+
+
+def _parse_named_dependency_args(
+    task_name: str, args_dict: dict[str, Any], recipe: Recipe
+) -> DependencyInvocation:
+    """Parse named dependency arguments.
+
+    Args:
+        task_name: Name of the dependency task
+        args_dict: Dictionary of argument names to values
+        recipe: Recipe containing task definitions
+
+    Returns:
+        DependencyInvocation with normalized args (defaults filled)
+
+    Raises:
+        ValueError: If validation fails
+    """
+    # Get the task to validate against
+    task = _get_validated_task(task_name, recipe)
+
+    # Parse task's arg specs
+    if not task.args:
+        if args_dict:
+            raise ValueError(
+                f"Task '{task_name}' takes no arguments, but {len(args_dict)} were provided"
+            )
+        return DependencyInvocation(task_name=task_name, args={})
+
+    parsed_specs = [parse_arg_spec(spec) for spec in task.args]
+    spec_map = {spec.name: spec for spec in parsed_specs}
+
+    # Validate all provided arg names exist
+    for arg_name in args_dict:
+        if arg_name not in spec_map:
+            raise ValueError(
+                f"Task '{task_name}' has no argument named '{arg_name}'"
+            )
+
+    # Build normalized args dict with defaults
+    normalized_args = {}
+    for spec in parsed_specs:
+        if spec.name in args_dict:
+            # Use provided value with type conversion (only convert strings)
+            value = args_dict[spec.name]
+            if isinstance(value, str):
+                click_type = get_click_type(spec.arg_type, min_val=spec.min_val, max_val=spec.max_val)
+                normalized_args[spec.name] = click_type.convert(value, None, None)
+            else:
+                # Value is already typed (e.g., bool, int from YAML)
+                normalized_args[spec.name] = value
+        elif spec.default is not None:
+            # Use default value (defaults are always strings in task specs)
+            click_type = get_click_type(spec.arg_type, min_val=spec.min_val, max_val=spec.max_val)
+            normalized_args[spec.name] = click_type.convert(spec.default, None, None)
+        else:
+            # Required arg not provided
+            raise ValueError(
+                f"Task '{task_name}' requires argument '{spec.name}' (no default provided)"
+            )
+
+    return DependencyInvocation(task_name=task_name, args=normalized_args)
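
A simplified sketch of the normalization that `parse_dependency_spec` and its helpers perform, covering the three dependency forms (plain string, positional list, named dict). The hard-coded arg specs below are hypothetical, and the real parser additionally validates values against the task's typed argument specifications:

```python
# Illustrative sketch: normalizing dependency specs to (task_name, args).
from typing import Any

# task -> [(argument name, default or None)]; invented for the example
ARG_SPECS = {"process": [("mode", None), ("verbose", "false")]}


def normalize(dep: str | dict[str, Any]) -> tuple[str, dict[str, Any] | None]:
    if isinstance(dep, str):
        return dep, None  # simple form: run with the task's own defaults
    (task_name, spec), = dep.items()
    names_defaults = ARG_SPECS[task_name]
    if isinstance(spec, list):  # positional form: map values by position
        given = dict(zip((n for n, _ in names_defaults), spec))
    else:                       # named form: values already keyed by name
        given = dict(spec)
    args = {}
    for name, default in names_defaults:
        if name in given:
            args[name] = given[name]
        elif default is not None:
            args[name] = default
        else:
            raise ValueError(f"missing required argument '{name}' for '{task_name}'")
    return task_name, args


print(normalize("process"))
print(normalize({"process": ["debug", "true"]}))
print(normalize({"process": {"mode": "release"}}))
```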
tasktree/substitution.py CHANGED
@@ -87,8 +87,11 @@ def substitute_arguments(text: str, args: dict[str, Any], exported_args: set[str
                 f"Required arguments must be provided."
             )
 
-        # Convert to string
-        return str(args[name])
+        # Convert to string (lowercase for booleans to match YAML/shell conventions)
+        value = args[name]
+        if isinstance(value, bool):
+            return str(value).lower()
+        return str(value)
 
     return PLACEHOLDER_PATTERN.sub(replace_match, text)
 
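
A minimal sketch of the boolean-lowercasing convention applied above, shown as a standalone helper rather than the package's `substitute_arguments`:

```python
# Illustrative sketch: stringify argument values so Python booleans render as
# "true"/"false", matching the lowercase YAML/shell convention.
def to_cli_string(value: object) -> str:
    if isinstance(value, bool):  # bool is checked explicitly; it is a subclass of int
        return str(value).lower()
    return str(value)


print(to_cli_string(True), to_cli_string(False), to_cli_string(8080), to_cli_string("debug"))
# -> true false 8080 debug
```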
tasktree-0.0.10.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tasktree
-Version: 0.0.8
+Version: 0.0.10
 Summary: A task automation tool with incremental execution
 Requires-Python: >=3.11
 Requires-Dist: click>=8.1.0
@@ -594,6 +594,97 @@ If an exported argument with a default isn't available as an environment variabl
 2. The CLI automatically applies defaults before execution
 3. You can explicitly provide the value: `tt deploy prod-server port=8080`
 
+### Parameterized Dependencies
+
+Dependencies can invoke tasks with specific arguments, enabling flexible and reusable task graphs:
+
+**Syntax:**
+
+```yaml
+tasks:
+  # Task with parameters
+  process:
+    args: [mode, verbose=false]
+    cmd: echo "mode={{arg.mode}} verbose={{arg.verbose}}"
+
+  # Simple dependency (uses defaults)
+  consumer1:
+    deps: [process] # Equivalent to: process(mode must be provided)
+    cmd: echo "done"
+
+  # Positional arguments
+  consumer2:
+    deps:
+      - process: [debug, true] # Maps to: mode=debug, verbose=true
+    cmd: echo "done"
+
+  # Named arguments
+  consumer3:
+    deps:
+      - process: {mode: release, verbose: false}
+    cmd: echo "done"
+
+  # Multiple invocations with different args
+  multi_build:
+    deps:
+      - process: [debug]
+      - process: [release]
+    cmd: echo "All builds complete"
+```
+
+**Key behaviors:**
+
+- **Simple string form** (`- task_name`): Uses task defaults for all arguments. Required arguments must have defaults or task invocation fails.
+- **Positional form** (`- task_name: [arg1, arg2]`): Arguments mapped by position. Can omit trailing args if they have defaults.
+- **Named form** (`- task_name: {arg1: val1}`): Arguments mapped by name. Can omit any arg with a default.
+- **Multiple invocations**: Same task with different arguments creates separate graph nodes, each executing independently.
+- **Normalization**: All forms normalized to named arguments with defaults filled before execution.
+- **Cache separation**: `process(debug)` and `process(release)` cache independently.
+
+**Restrictions:**
+
+- **No empty lists**: `- task: []` is invalid (use `- task` instead)
+- **No mixed positional and named**: Choose one form per dependency
+- **Single-key dicts**: `{task1: [x], task2: [y]}` is invalid (multi-key not allowed)
+
+**Validation:**
+
+Validation happens at graph construction time with clear error messages:
+
+```
+Task 'process' takes 2 arguments, got 3
+Task 'build' has no argument named 'mode'
+Task 'deploy' requires argument 'environment' (no default provided)
+```
+
+**Example use cases:**
+
+```yaml
+tasks:
+  # Compile for different platforms
+  compile:
+    args: [target]
+    cmd: cargo build --target {{arg.target}}
+
+  dist:
+    deps:
+      - compile: [x86_64-unknown-linux-gnu]
+      - compile: [aarch64-unknown-linux-gnu]
+    cmd: tar czf dist.tar.gz target/*/release/app
+
+  # Run tests with different configurations
+  test:
+    args: [config]
+    cmd: pytest --config={{arg.config}}
+
+  ci:
+    deps:
+      - test: [unit]
+      - test: [integration]
+      - test: [e2e]
+    cmd: echo "All tests passed"
+```
+
 ## Environment Variables
 
 Task Tree supports reading environment variables in two ways:
tasktree-0.0.10.dist-info/RECORD ADDED
@@ -0,0 +1,15 @@
+tasktree/__init__.py,sha256=MVmdvKb3JdqLlo0x2_TPGMfgFC0HsDnP79HAzGnFnjI,1081
+tasktree/cli.py,sha256=H5T8wOxLBGx-ZTQEnkoJrX3srgD5b_7BLf1IWl18M2M,17597
+tasktree/docker.py,sha256=qvja8G63uAcC73YMVY739egda1_CcBtoqzm0qIJU_Q8,14443
+tasktree/executor.py,sha256=Q7Bks5B88i-IyZDpxGSps9MM3uflz0U3yn4Rtq_uHMM,42266
+tasktree/graph.py,sha256=oXLxX0Ix4zSkVBg8_3x9K7WxSFpg136sp4MF-d2mDEQ,9682
+tasktree/hasher.py,sha256=0GrnCfwAXnwq_kpnHFFb12B5_2VFNXx6Ng7hTdcCyXo,4415
+tasktree/parser.py,sha256=N_dXHl5UF0rBvIVdbsVZOo5Ur5uFEKgY1sSPNfScTxc,67135
+tasktree/state.py,sha256=Cktl4D8iDZVd55aO2LqVyPrc-BnljkesxxkcMcdcfOY,3541
+tasktree/substitution.py,sha256=M_qcP0NKJATrKcNShSqHJatneuth1RVwTk1ci8-ZuxQ,6473
+tasktree/tasks.py,sha256=2QdQZtJAX2rSGbyXKG1z9VF_siz1DUzdvzCgPkykxtU,173
+tasktree/types.py,sha256=R_YAyO5bMLB6XZnkMRT7VAtlkA_Xx6xu0aIpzQjrBXs,4357
+tasktree-0.0.10.dist-info/METADATA,sha256=JyhF89pfUwr0bkV33rNae-0ytPyOEr2bKHfiWN-YsK0,37124
+tasktree-0.0.10.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+tasktree-0.0.10.dist-info/entry_points.txt,sha256=lQINlvRYnimvteBbnhH84A9clTg8NnpEjCWqWkqg8KE,40
+tasktree-0.0.10.dist-info/RECORD,,
tasktree-0.0.8.dist-info/RECORD REMOVED
@@ -1,15 +0,0 @@
-tasktree/__init__.py,sha256=MVmdvKb3JdqLlo0x2_TPGMfgFC0HsDnP79HAzGnFnjI,1081
-tasktree/cli.py,sha256=bPojvR7kS2iQomNFSykYsrOgD4Nc5XAH4XbEfdVL9qk,15736
-tasktree/docker.py,sha256=R69NcZw4MyaxEXyJAwniYCm877iaI10jRhxlLmkA6Fs,14119
-tasktree/executor.py,sha256=iZ_BF3pjyxhH6l2p78rLSK54Xk-V08Ae_BVF6CIi3jo,38979
-tasktree/graph.py,sha256=lA3ExNM_ag0AlC6iW20unseCjRg5wCZXbmXs2M6TnQw,5578
-tasktree/hasher.py,sha256=S4OKsNjf1ZnhGAvRWr6usuAudiozlQqrvcoAGYzJ_w8,2852
-tasktree/parser.py,sha256=K83HyujCyh9NGJoBeUzozYRFQELNHPzWcJoZdPh79yE,56808
-tasktree/state.py,sha256=Cktl4D8iDZVd55aO2LqVyPrc-BnljkesxxkcMcdcfOY,3541
-tasktree/substitution.py,sha256=Sr8_aBdcWXtkCybkSFMHRjQyQSq-cMREtps_A9ASUgk,6320
-tasktree/tasks.py,sha256=2QdQZtJAX2rSGbyXKG1z9VF_siz1DUzdvzCgPkykxtU,173
-tasktree/types.py,sha256=R_YAyO5bMLB6XZnkMRT7VAtlkA_Xx6xu0aIpzQjrBXs,4357
-tasktree-0.0.8.dist-info/METADATA,sha256=JKjK-6i9ZlTq4gCVWl1dJE6W615tjtlQt64bPwi4vcU,34533
-tasktree-0.0.8.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-tasktree-0.0.8.dist-info/entry_points.txt,sha256=lQINlvRYnimvteBbnhH84A9clTg8NnpEjCWqWkqg8KE,40
-tasktree-0.0.8.dist-info/RECORD,,