tasktree 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tasktree/__init__.py +42 -0
- tasktree/cli.py +502 -0
- tasktree/executor.py +365 -0
- tasktree/graph.py +139 -0
- tasktree/hasher.py +74 -0
- tasktree/parser.py +300 -0
- tasktree/state.py +119 -0
- tasktree/tasks.py +8 -0
- tasktree/types.py +130 -0
- tasktree-0.0.1.dist-info/METADATA +387 -0
- tasktree-0.0.1.dist-info/RECORD +13 -0
- tasktree-0.0.1.dist-info/WHEEL +4 -0
- tasktree-0.0.1.dist-info/entry_points.txt +2 -0
tasktree/parser.py
ADDED
|
@@ -0,0 +1,300 @@
|
|
|
1
|
+
"""Parse recipe YAML files and handle imports."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import yaml
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class CircularImportError(Exception):
    """Raised when recipe imports form a cycle (a file transitively imports itself)."""
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@dataclass
class Task:
    """A single runnable task parsed from a recipe file."""

    name: str
    cmd: str
    desc: str = ""
    deps: list[str] = field(default_factory=list)
    inputs: list[str] = field(default_factory=list)
    outputs: list[str] = field(default_factory=list)
    working_dir: str = ""
    args: list[str] = field(default_factory=list)
    source_file: str = ""  # Track which file defined this task

    def __post_init__(self):
        """Coerce any scalar string field into a single-element list."""
        # YAML authors may write `deps: foo` instead of `deps: [foo]`;
        # normalize all four list-valued fields the same way.
        for list_attr in ("deps", "inputs", "outputs", "args"):
            current = getattr(self, list_attr)
            if isinstance(current, str):
                setattr(self, list_attr, [current])
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@dataclass
class Recipe:
    """A fully parsed recipe: every task (including imported ones) keyed by name."""

    tasks: dict[str, Task]
    project_root: Path

    def get_task(self, name: str) -> Task | None:
        """Get task by name.

        Args:
            name: Task name (may be namespaced like 'build.compile')

        Returns:
            Task if found, None otherwise
        """
        try:
            return self.tasks[name]
        except KeyError:
            return None

    def task_names(self) -> list[str]:
        """Get all task names."""
        return [*self.tasks]
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def find_recipe_file(start_dir: Path | None = None) -> Path | None:
    """Find recipe file (tasktree.yaml or tt.yaml) in current or parent directories.

    Args:
        start_dir: Directory to start searching from (defaults to cwd)

    Returns:
        Path to recipe file if found, None otherwise
    """
    if start_dir is None:
        start_dir = Path.cwd()

    base = start_dir.resolve()

    # Walk from the start directory up to the filesystem root; `parents`
    # already enumerates every ancestor, so no manual while-loop is needed.
    for directory in (base, *base.parents):
        # tasktree.yaml takes priority over tt.yaml within the same directory.
        for candidate in ("tasktree.yaml", "tt.yaml"):
            recipe = directory / candidate
            if recipe.exists():
                return recipe

    return None
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def parse_recipe(recipe_path: Path) -> Recipe:
    """Parse a recipe file, following its imports, into a single Recipe.

    Args:
        recipe_path: Path to the main recipe file

    Returns:
        Recipe object with all tasks (including recursively imported tasks)

    Raises:
        FileNotFoundError: If recipe file doesn't exist
        CircularImportError: If circular imports are detected
        yaml.YAMLError: If YAML is invalid
        ValueError: If recipe structure is invalid
    """
    if not recipe_path.exists():
        raise FileNotFoundError(f"Recipe file not found: {recipe_path}")

    root = recipe_path.parent

    # _parse_file walks the import graph depth-first, so a single call
    # on the top-level file yields every task.
    return Recipe(
        tasks=_parse_file(recipe_path, namespace=None, project_root=root),
        project_root=root,
    )
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
def _parse_file(
    file_path: Path,
    namespace: str | None,
    project_root: Path,
    import_stack: list[Path] | None = None,
) -> dict[str, Task]:
    """Parse a single YAML file and return tasks, recursively processing imports.

    Args:
        file_path: Path to YAML file
        namespace: Optional namespace prefix for tasks
        project_root: Root directory of the project
        import_stack: Stack of files being imported (for circular detection)

    Returns:
        Dictionary of task name to Task objects

    Raises:
        CircularImportError: If a circular import is detected
        FileNotFoundError: If an imported file doesn't exist
        ValueError: If the file or a task definition is structurally invalid
    """
    # Initialize import stack if not provided
    if import_stack is None:
        import_stack = []

    # Detect circular imports: the same file appearing twice in the active chain
    if file_path in import_stack:
        chain = " → ".join(str(f.name) for f in import_stack + [file_path])
        raise CircularImportError(f"Circular import detected: {chain}")

    # Add current file to stack
    import_stack.append(file_path)

    # Load YAML
    with open(file_path, "r") as f:
        data = yaml.safe_load(f)

    if data is None:
        # An empty file is a valid, task-less recipe.
        data = {}
    if not isinstance(data, dict):
        # Guard against YAML whose top level is a list or scalar; previously
        # this surfaced as an opaque AttributeError on data.get().
        raise ValueError(f"Recipe file must be a mapping of task names: {file_path}")

    tasks: dict[str, Task] = {}
    file_dir = file_path.parent

    # Default working directory is the file's directory, expressed relative to
    # the project root ("." for the root itself).
    # NOTE(review): file_dir.relative_to() raises if an import escapes the
    # project root (e.g. "../x.yaml") — confirm whether that case is supported.
    default_working_dir = str(file_dir.relative_to(project_root)) if file_dir != project_root else "."

    # Namespaces introduced by this file's own imports (for dependency rewriting)
    local_import_namespaces: set[str] = set()

    # Process nested imports FIRST so imported tasks exist before local ones.
    for import_spec in data.get("import") or []:
        child_file = import_spec["file"]
        child_namespace = import_spec["as"]

        # Track this namespace as a local import
        local_import_namespaces.add(child_namespace)

        # Build full namespace chain so nested imports stay fully qualified
        full_namespace = f"{namespace}.{child_namespace}" if namespace else child_namespace

        # Resolve import path relative to current file's directory
        child_path = file_path.parent / child_file
        if not child_path.exists():
            raise FileNotFoundError(f"Import file not found: {child_path}")

        # Recursively process with namespace chain and import stack
        nested_tasks = _parse_file(
            child_path,
            full_namespace,
            project_root,
            import_stack.copy(),  # Pass copy so sibling imports don't share mutation
        )

        tasks.update(nested_tasks)

    # Process local tasks
    for task_name, task_data in data.items():
        # Skip import declarations — the import list is not a task
        if task_name == "import":
            continue

        if not isinstance(task_data, dict):
            raise ValueError(f"Task '{task_name}' must be a dictionary")

        if "cmd" not in task_data:
            raise ValueError(f"Task '{task_name}' missing required 'cmd' field")

        # Apply namespace if provided
        full_name = f"{namespace}.{task_name}" if namespace else task_name

        # Set working directory (task may override the file-level default)
        working_dir = task_data.get("working_dir", default_working_dir)

        # Rewrite dependencies with namespace
        deps = task_data.get("deps", [])
        if isinstance(deps, str):
            deps = [deps]
        if namespace:
            # Rewrite dependencies: only prefix if it's a local reference.
            # A dependency is local if:
            # 1. It has no dots (simple name like "init")
            # 2. It starts with a local import namespace (like "base.setup"
            #    when "base" is imported by this file)
            rewritten_deps = []
            for dep in deps:
                dep_root = dep.split(".", 1)[0]
                if "." not in dep or dep_root in local_import_namespaces:
                    rewritten_deps.append(f"{namespace}.{dep}")
                else:
                    # External reference - keep as-is
                    rewritten_deps.append(dep)
            deps = rewritten_deps

        task = Task(
            name=full_name,
            cmd=task_data["cmd"],
            desc=task_data.get("desc", ""),
            deps=deps,
            inputs=task_data.get("inputs", []),
            outputs=task_data.get("outputs", []),
            working_dir=working_dir,
            args=task_data.get("args", []),
            source_file=str(file_path),
        )

        tasks[full_name] = task

    # This file is fully processed; remove it from the active chain
    import_stack.pop()

    return tasks
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
def parse_arg_spec(arg_spec: str) -> tuple[str, str, str | None]:
    """Parse argument specification.

    Format: name:type=default
    - name is required
    - type is optional (defaults to 'str')
    - default is optional

    Args:
        arg_spec: Argument specification string

    Returns:
        Tuple of (name, type, default)

    Examples:
        >>> parse_arg_spec("environment")
        ('environment', 'str', None)
        >>> parse_arg_spec("region=eu-west-1")
        ('region', 'str', 'eu-west-1')
        >>> parse_arg_spec("port:int=8080")
        ('port', 'int', '8080')
    """
    # Strip the default off first (everything right of the first '='),
    # so a default containing ':' never confuses the type split.
    name_and_type, eq_sep, default_text = arg_spec.partition("=")
    name, colon_sep, type_text = name_and_type.partition(":")
    return (
        name,
        type_text if colon_sep else "str",
        default_text if eq_sep else None,
    )
|
tasktree/state.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
"""State file management and pruning."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from dataclasses import dataclass, field
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass
class TaskState:
    """Recorded outcome of one task execution: timestamp plus input mtimes."""

    last_run: float
    input_state: dict[str, float] = field(default_factory=dict)

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict suitable for JSON."""
        return {"last_run": self.last_run, "input_state": self.input_state}

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "TaskState":
        """Rebuild a TaskState from its JSON representation.

        Raises KeyError when the required 'last_run' key is absent.
        """
        return cls(data["last_run"], data.get("input_state", {}))
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class StateManager:
    """Manages the .tasktree-state file (a JSON cache of prior task runs)."""

    STATE_FILE = ".tasktree-state"

    def __init__(self, project_root: Path):
        """Initialize state manager.

        Args:
            project_root: Root directory of the project
        """
        self.project_root = project_root
        self.state_path = project_root / self.STATE_FILE
        self._state: dict[str, TaskState] = {}
        self._loaded = False  # lazy-load guard; load() runs on first access

    def load(self) -> None:
        """Load state from file if it exists.

        A missing file yields empty state; a corrupted or structurally wrong
        file is discarded rather than crashing, since the state is only a
        cache of prior runs.
        """
        if self.state_path.exists():
            try:
                with open(self.state_path, "r") as f:
                    data = json.load(f)
                self._state = {
                    key: TaskState.from_dict(value) for key, value in data.items()
                }
            except (json.JSONDecodeError, KeyError, AttributeError, TypeError):
                # JSONDecodeError: not JSON; KeyError: missing 'last_run';
                # AttributeError/TypeError: top level isn't a dict of dicts.
                # Previously only the first two were caught, so a state file
                # containing e.g. a JSON list crashed instead of starting fresh.
                self._state = {}
        self._loaded = True

    def save(self) -> None:
        """Write all state entries to the state file as indented JSON."""
        data = {key: value.to_dict() for key, value in self._state.items()}
        with open(self.state_path, "w") as f:
            json.dump(data, f, indent=2)

    def get(self, cache_key: str) -> TaskState | None:
        """Get state for a task.

        Args:
            cache_key: Cache key (task_hash or task_hash__args_hash)

        Returns:
            TaskState if found, None otherwise
        """
        if not self._loaded:
            self.load()
        return self._state.get(cache_key)

    def set(self, cache_key: str, state: TaskState) -> None:
        """Set state for a task.

        Args:
            cache_key: Cache key (task_hash or task_hash__args_hash)
            state: TaskState to store
        """
        if not self._loaded:
            self.load()
        self._state[cache_key] = state

    def prune(self, valid_task_hashes: set[str]) -> None:
        """Remove state entries for tasks that no longer exist.

        Cache keys are either ``task_hash`` or ``task_hash__args_hash``; only
        the part before the first ``__`` identifies the task.

        Args:
            valid_task_hashes: Set of valid task hashes from current recipe
        """
        if not self._loaded:
            self.load()

        stale_keys = [
            cache_key
            for cache_key in self._state
            if cache_key.split("__")[0] not in valid_task_hashes
        ]
        for key in stale_keys:
            del self._state[key]

    def clear(self) -> None:
        """Clear all state (useful for testing)."""
        self._state = {}
        self._loaded = True
|
tasktree/tasks.py
ADDED
tasktree/types.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
"""Custom Click parameter types for task argument validation."""
|
|
2
|
+
|
|
3
|
+
import re
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from ipaddress import IPv4Address, IPv6Address, ip_address
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import click
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class HostnameType(click.ParamType):
    """Validates hostname format (not DNS resolution)."""

    name = "hostname"

    # RFC 1123: dot-separated labels of 1-63 alphanumeric/hyphen characters,
    # no leading/trailing hyphen, at most 253 chars total, optional final dot.
    HOSTNAME_PATTERN = re.compile(
        r"^(?=.{1,253}$)(?!-)[A-Za-z0-9-]{1,63}(?<!-)(\.[A-Za-z0-9-]{1,63})*\.?$"
    )

    def convert(self, value: Any, param: click.Parameter | None, ctx: click.Context | None) -> str:
        """Return *value* unchanged when it matches the hostname pattern; fail otherwise."""
        if not isinstance(value, str) or self.HOSTNAME_PATTERN.match(value) is None:
            self.fail(f"{value!r} is not a valid hostname", param, ctx)
        return value
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class EmailType(click.ParamType):
    """Validates email format (not deliverability)."""

    name = "email"

    # Simplified RFC 5322 check: local part, '@', dotted domain.
    EMAIL_PATTERN = re.compile(
        r"^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$"
    )

    def convert(self, value: Any, param: click.Parameter | None, ctx: click.Context | None) -> str:
        """Return *value* unchanged when it matches the email pattern; fail otherwise."""
        if not isinstance(value, str) or self.EMAIL_PATTERN.match(value) is None:
            self.fail(f"{value!r} is not a valid email address", param, ctx)
        return value
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class IPType(click.ParamType):
    """Validates IP address (IPv4 or IPv6)."""

    name = "ip"

    def convert(self, value: Any, param: click.Parameter | None, ctx: click.Context | None) -> str:
        """Return str(value) when it parses as an IP address; fail otherwise."""
        try:
            ip_address(value)
        except ValueError:
            self.fail(f"{value!r} is not a valid IP address", param, ctx)
        return str(value)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
class IPv4Type(click.ParamType):
    """Validates IPv4 address."""

    name = "ipv4"

    def convert(self, value: Any, param: click.Parameter | None, ctx: click.Context | None) -> str:
        """Return str(value) when it parses as an IPv4 address; fail otherwise."""
        try:
            IPv4Address(value)
        except ValueError:
            self.fail(f"{value!r} is not a valid IPv4 address", param, ctx)
        return str(value)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class IPv6Type(click.ParamType):
    """Validates IPv6 address."""

    name = "ipv6"

    def convert(self, value: Any, param: click.Parameter | None, ctx: click.Context | None) -> str:
        """Return str(value) when it parses as an IPv6 address; fail otherwise."""
        try:
            IPv6Address(value)
        except ValueError:
            self.fail(f"{value!r} is not a valid IPv6 address", param, ctx)
        return str(value)
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
class DateTimeType(click.ParamType):
    """Validates datetime in format YYYY-MM-DDTHH:MM:SS."""

    name = "datetime"

    def convert(self, value: Any, param: click.Parameter | None, ctx: click.Context | None) -> str:
        """Return *value* when datetime.fromisoformat accepts it; fail otherwise."""
        # NOTE(review): fromisoformat also accepts bare dates like
        # "2024-01-02", slightly wider than the advertised format — confirm
        # whether that is intended.
        parses = False
        if isinstance(value, str):
            try:
                datetime.fromisoformat(value)
            except ValueError:
                parses = False
            else:
                parses = True
        if parses:
            return value
        self.fail(f"{value!r} is not a valid datetime (expected YYYY-MM-DDTHH:MM:SS format)", param, ctx)
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
# Type registry for dynamic parameter creation: maps the type names usable in
# task argument specs (see parser.parse_arg_spec) to Click parameter types.
TYPE_MAPPING = {
    "str": click.STRING,
    "int": click.INT,
    "float": click.FLOAT,
    "bool": click.BOOL,
    "path": click.Path(),
    "datetime": DateTimeType(),
    "hostname": HostnameType(),
    "email": EmailType(),
    "ip": IPType(),
    "ipv4": IPv4Type(),
    "ipv6": IPv6Type(),
}


def get_click_type(type_name: str) -> click.ParamType:
    """Get Click parameter type by name.

    Args:
        type_name: Type name from task definition (e.g., 'str', 'int', 'hostname')

    Returns:
        Click parameter type instance

    Raises:
        ValueError: If type_name is not recognized; the message lists the
            supported names so the user can correct the recipe.
    """
    try:
        return TYPE_MAPPING[type_name]
    except KeyError:
        supported = ", ".join(sorted(TYPE_MAPPING))
        raise ValueError(f"Unknown type: {type_name} (supported: {supported})") from None
|