pydagu 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pydagu/__init__.py ADDED
@@ -0,0 +1,7 @@
+ """Pydagu - Pydantic models for Dagu DAG validation"""
+
+ from pydagu.models import * # noqa: F401, F403
+ from pydagu.builder import DagBuilder, StepBuilder
+
+ __version__ = "0.1.0"
+ __all__ = ["DagBuilder", "StepBuilder"]
pydagu/builder.py ADDED
@@ -0,0 +1,508 @@
+ """Fluent builder API for creating Dagu DAGs programmatically"""
+
+ from typing import Any, Literal
+ import yaml
+
+ from .models.base import Precondition
+ from .models.dag import Dag
+ from .models.step import Step, RetryPolicy, ContinueOn, ParallelConfig
+ from .models.executor import (
+     ExecutorConfig,
+     HTTPExecutorConfig,
+     SSHExecutorConfig,
+     MailExecutorConfig,
+     DockerExecutorConfig,
+     JQExecutorConfig,
+     ShellExecutorConfig,
+ )
+ from .models.handlers import HandlerConfig, HandlerOn
+ from .models.notifications import MailOn, SMTPConfig
+ from .models.infrastructure import ContainerConfig, SSHConfig
+
+
+ class DagBuilder:
+     """Fluent builder class for creating Dagu DAGs programmatically
+
+     Example:
+         >>> dag = (DagBuilder("my-dag")
+         ...     .description("My ETL pipeline")
+         ...     .schedule("0 2 * * *")
+         ...     .add_tag("production")
+         ...     .add_step(command="python extract.py")
+         ...     .add_step(command="python transform.py", depends="extract")
+         ...     .build())
+     """
+
+     def __init__(
+         self,
+         name: str,
+         description: str = "",
+         schedule: str | None = None,
+         **kwargs: Any,
+     ) -> None:
+         """Initialize a new DAG builder
+
+         Args:
+             name: DAG name
+             description: DAG description
+             schedule: Cron expression for scheduling
+             **kwargs: Additional DAG configuration options
+         """
+         self._dag_config = {
+             "name": name,
+             "description": description,
+             "schedule": schedule,
+             "steps": [],
+             **kwargs,
+         }
+
+     def description(self, desc: str) -> "DagBuilder":
+         """Set the DAG description"""
+         self._dag_config["description"] = desc
+         return self
+
+     def schedule(self, cron: str) -> "DagBuilder":
+         """Set the DAG schedule using a cron expression
+
+         Args:
+             cron: Cron expression (e.g., "0 2 * * *" for daily at 2 AM)
+         """
+         self._dag_config["schedule"] = cron
+         return self
+
+     def add_tag(self, tag: str) -> "DagBuilder":
+         """Add a tag to the DAG"""
+         if "tags" not in self._dag_config:
+             self._dag_config["tags"] = []
+         self._dag_config["tags"].append(tag)
+         return self
+
+     def tags(self, *tags: str) -> "DagBuilder":
+         """Set multiple tags for the DAG"""
+         self._dag_config["tags"] = list(tags)
+         return self
+
+     def max_active_runs(self, limit: int) -> "DagBuilder":
+         """Set maximum concurrent DAG runs"""
+         self._dag_config["maxActiveRuns"] = limit
+         return self
+
+     def max_active_steps(self, limit: int) -> "DagBuilder":
+         """Set maximum concurrent steps"""
+         self._dag_config["maxActiveSteps"] = limit
+         return self
+
+     def timeout(self, seconds: int) -> "DagBuilder":
+         """Set DAG execution timeout in seconds"""
+         self._dag_config["timeoutSec"] = seconds
+         return self
+
+     def history_retention(self, days: int) -> "DagBuilder":
+         """Set history retention period in days"""
+         self._dag_config["histRetentionDays"] = days
+         return self
+
+     def add_param(self, key: str, value: str) -> "DagBuilder":
+         """Add a parameter to the DAG"""
+         if "params" not in self._dag_config:
+             self._dag_config["params"] = []
+         self._dag_config["params"].append({key: value})
+         return self
+
+     def add_env(self, key: str, value: str) -> "DagBuilder":
+         """Add an environment variable to the DAG"""
+         if "env" not in self._dag_config:
+             self._dag_config["env"] = []
+         self._dag_config["env"].append({key: value})
+         return self
+
+     def dotenv(self, *paths: str) -> "DagBuilder":
+         """Set dotenv file paths"""
+         self._dag_config["dotenv"] = list(paths)
+         return self
+
+     def container(
+         self,
+         image: str,
+         pull_policy: Literal["always", "missing", "never"] | None = None,
+         env: list[str] | None = None,
+         volumes: list[str] | None = None,
+     ) -> "DagBuilder":
+         """Set default container configuration for all steps"""
+         self._dag_config["container"] = ContainerConfig(
+             image=image,
+             pullPolicy=pull_policy,
+             env=env,
+             volumes=volumes,
+         )
+         return self
+
+     def ssh_config(
+         self,
+         user: str,
+         host: str,
+         port: int = 22,
+         key: str | None = None,
+         password: str | None = None,
+         **kwargs: Any,
+     ) -> "DagBuilder":
+         """Set SSH configuration for the DAG"""
+         self._dag_config["ssh"] = SSHConfig(
+             user=user,
+             host=host,
+             port=port,
+             key=key,
+             password=password,
+             **kwargs,
+         )
+         return self
+
+     def smtp_config(
+         self,
+         host: str,
+         port: str,
+         username: str | None = None,
+         password: str | None = None,
+     ) -> "DagBuilder":
+         """Set SMTP configuration for email notifications"""
+         self._dag_config["smtp"] = SMTPConfig(
+             host=host,
+             port=port,
+             username=username,
+             password=password,
+         )
+         return self
+
+     def mail_on_failure(self, enabled: bool = True) -> "DagBuilder":
+         """Enable/disable email notifications on failure"""
+         if "mailOn" not in self._dag_config:
+             self._dag_config["mailOn"] = MailOn(failure=enabled, success=None)
+         else:
+             self._dag_config["mailOn"].failure = enabled
+         return self
+
+     def mail_on_success(self, enabled: bool = True) -> "DagBuilder":
+         """Enable/disable email notifications on success"""
+         if "mailOn" not in self._dag_config:
+             self._dag_config["mailOn"] = MailOn(failure=None, success=enabled)
+         else:
+             self._dag_config["mailOn"].success = enabled
+         return self
+
+     def add_precondition(self, condition: str, expected: str) -> "DagBuilder":
+         """Add a DAG-level precondition"""
+         if "preconditions" not in self._dag_config:
+             self._dag_config["preconditions"] = []
+         self._dag_config["preconditions"].append(
+             Precondition(condition=condition, expected=expected)
+         )
+         return self
+
+     def on_success(
+         self, command: str | None = None, executor: ExecutorConfig | None = None
+     ) -> "DagBuilder":
+         """Set success handler"""
+         if "handlerOn" not in self._dag_config:
+             self._dag_config["handlerOn"] = HandlerOn(
+                 success=None, failure=None, cancel=None, exit=None
+             )
+         self._dag_config["handlerOn"].success = HandlerConfig(
+             command=command, executor=executor
+         )
+         return self
+
+     def on_failure(
+         self, command: str | None = None, executor: ExecutorConfig | None = None
+     ) -> "DagBuilder":
+         """Set failure handler"""
+         if "handlerOn" not in self._dag_config:
+             self._dag_config["handlerOn"] = HandlerOn(
+                 success=None, failure=None, cancel=None, exit=None
+             )
+         self._dag_config["handlerOn"].failure = HandlerConfig(
+             command=command, executor=executor
+         )
+         return self
+
+     def on_exit(
+         self, command: str | None = None, executor: ExecutorConfig | None = None
+     ) -> "DagBuilder":
+         """Set exit handler"""
+         if "handlerOn" not in self._dag_config:
+             self._dag_config["handlerOn"] = HandlerOn(
+                 success=None, failure=None, cancel=None, exit=None
+             )
+         self._dag_config["handlerOn"].exit = HandlerConfig(
+             command=command, executor=executor
+         )
+         return self
+
+     def add_step(
+         self,
+         name: str | None = None,
+         command: str | None = None,
+         script: str | None = None,
+         **kwargs: Any,
+     ) -> "DagBuilder":
+         """Add a step to the DAG
+
+         Args:
+             name: Step name
+             command: Command to execute
+             script: Script path to execute
+             **kwargs: Additional step configuration (depends, output, params, etc.)
+         """
+         if not command and not script:
+             raise ValueError("Either command or script must be provided for a step")
+         step = Step(name=name, command=command, script=script, **kwargs)
+         self._dag_config["steps"].append(step)
+         return self
+
+     def add_simple_step(self, command_or_script: str) -> "DagBuilder":
+         """Add a simple step with just a command or script path"""
+         self._dag_config["steps"].append(command_or_script)
+         return self
+
+     def add_step_models(self, *steps: Step) -> "DagBuilder":
+         """Add one or more pre-built Step objects to the DAG"""
+         for step in steps:
+             self._dag_config["steps"].append(step)
+         return self
+
+     def build(self) -> Dag:
+         """Build and return the final DAG model"""
+         return Dag(**self._dag_config)
+
+     def to_yaml(self, exclude_none: bool = True) -> str:
+         """Export the DAG to YAML format
+
+         Args:
+             exclude_none: Exclude None values from output
+         """
+         dag = self.build()
+         dag_dict = dag.model_dump(exclude_none=exclude_none)
+         return yaml.dump(dag_dict, default_flow_style=False, sort_keys=False)
+
+     def to_dict(self, exclude_none: bool = True) -> dict[str, Any]:
+         """Export the DAG to a dictionary
+
+         Args:
+             exclude_none: Exclude None values from output
+         """
+         dag = self.build()
+         return dag.model_dump(exclude_none=exclude_none)
+
+     def save(self, filepath: str, exclude_none: bool = True) -> None:
+         """Save the DAG to a YAML file
+
+         Args:
+             filepath: Path to save the YAML file
+             exclude_none: Exclude None values from output
+         """
+         with open(filepath, "w") as f:
+             f.write(self.to_yaml(exclude_none=exclude_none))
+
+
+ class StepBuilder:
+     """Builder for creating individual steps with complex configurations
+
+     Example:
+         >>> step = (StepBuilder("extract")
+         ...     .command("python extract.py")
+         ...     .depends_on("validate")
+         ...     .retry(limit=3, interval=60)
+         ...     .docker_executor(image="python:3.11")
+         ...     .build())
+     """
+
+     def __init__(self, name: str | None = None):
+         """Initialize a step builder
+
+         Args:
+             name: Step name
+         """
+         self._step_config: dict[str, Any] = {"name": name}
+
+     def command(self, cmd: str) -> "StepBuilder":
+         """Set the command to execute"""
+         self._step_config["command"] = cmd
+         return self
+
+     def script(self, path: str) -> "StepBuilder":
+         """Set the script path to execute"""
+         self._step_config["script"] = path
+         return self
+
+     def description(self, desc: str) -> "StepBuilder":
+         """Set the step description"""
+         self._step_config["description"] = desc
+         return self
+
+     def depends_on(self, *steps: str) -> "StepBuilder":
+         """Set step dependencies"""
+         if len(steps) == 1:
+             self._step_config["depends"] = steps[0]
+         else:
+             self._step_config["depends"] = list(steps)
+         return self
+
+     def output(self, var_name: str) -> "StepBuilder":
+         """Set output variable name"""
+         self._step_config["output"] = var_name
+         return self
+
+     def params(self, *params: str) -> "StepBuilder":
+         """Set step parameters"""
+         if len(params) == 1:
+             self._step_config["params"] = params[0]
+         else:
+             self._step_config["params"] = list(params)
+         return self
+
+     def working_dir(self, path: str) -> "StepBuilder":
+         """Set working directory"""
+         self._step_config["dir"] = path
+         return self
+
+     def retry(self, limit: int, interval: int) -> "StepBuilder":
+         """Set retry policy
+
+         Args:
+             limit: Maximum number of retries
+             interval: Interval between retries in seconds
+         """
+         self._step_config["retryPolicy"] = RetryPolicy(
+             limit=limit, intervalSec=interval
+         )
+         return self
+
+     def continue_on_failure(self, enabled: bool = True) -> "StepBuilder":
+         """Continue execution even if this step fails"""
+         if "continueOn" not in self._step_config:
+             self._step_config["continueOn"] = ContinueOn(failure=enabled, skipped=None)
+         else:
+             self._step_config["continueOn"].failure = enabled
+         return self
+
+     def parallel(
+         self, items: list[str], max_concurrent: int | None = None
+     ) -> "StepBuilder":
+         """Set parallel execution configuration
+
+         Args:
+             items: Items to process in parallel
+             max_concurrent: Maximum concurrent items
+         """
+         self._step_config["parallel"] = ParallelConfig(
+             items=items, maxConcurrent=max_concurrent
+         )
+         return self
+
+     def docker_executor(
+         self,
+         image: str,
+         pull: bool | None = None,
+         env: list[str] | dict[str, str] | None = None,
+         volumes: list[str] | None = None,
+         **kwargs: Any,
+     ) -> "StepBuilder":
+         """Set Docker executor for this step"""
+         self._step_config["executor"] = ExecutorConfig(
+             type="docker",
+             config=DockerExecutorConfig(
+                 image=image, pull=pull, env=env, volumes=volumes, **kwargs
+             ),
+         )
+         return self
+
+     def http_executor(
+         self,
+         headers: dict[str, str] | None = None,
+         query: dict[str, str] | None = None,
+         body: str | dict[str, Any] | None = None,
+         timeout: int | None = None,
+         **kwargs: Any,
+     ) -> "StepBuilder":
+         """Set HTTP executor for this step"""
+         self._step_config["executor"] = ExecutorConfig(
+             type="http",
+             config=HTTPExecutorConfig(
+                 headers=headers, query=query, body=body, timeout=timeout, **kwargs
+             ),
+         )
+         return self
+
+     def ssh_executor(
+         self,
+         user: str,
+         host: str,
+         port: int = 22,
+         key: str | None = None,
+         password: str | None = None,
+         **kwargs: Any,
+     ) -> "StepBuilder":
+         """Set SSH executor for this step"""
+         self._step_config["executor"] = ExecutorConfig(
+             type="ssh",
+             config=SSHExecutorConfig(
+                 user=user, host=host, port=port, key=key, password=password, **kwargs
+             ),
+         )
+         return self
+
+     def mail_executor(
+         self,
+         to: str | list[str],
+         subject: str | None = None,
+         body: str | None = None,
+         **kwargs: Any,
+     ) -> "StepBuilder":
+         """Set mail executor for this step"""
+         self._step_config["executor"] = ExecutorConfig(
+             type="mail",
+             config=MailExecutorConfig(to=to, subject=subject, body=body, **kwargs),
+         )
+         return self
+
+     def shell_executor(
+         self,
+         shell: str = "bash",
+         env: dict[str, str] | None = None,
+     ) -> "StepBuilder":
+         """Set shell executor for this step"""
+         self._step_config["executor"] = ExecutorConfig(
+             type="shell",
+             config=ShellExecutorConfig(shell=shell, env=env),  # nosec: B604
+         )
+         return self
+
+     def jq_executor(
+         self,
+         query: str,
+         raw: bool | None = None,
+         compact: bool | None = None,
+     ) -> "StepBuilder":
+         """Set jq executor for this step"""
+         self._step_config["executor"] = ExecutorConfig(
+             type="jq",
+             config=JQExecutorConfig(query=query, raw=raw, compact=compact),
+         )
+         return self
+
+     def mail_on_error(self, enabled: bool = True) -> "StepBuilder":
+         """Send email notification on step error"""
+         self._step_config["mailOnError"] = enabled
+         return self
+
+     def add_precondition(self, condition: str, expected: str) -> "StepBuilder":
+         """Add a step-level precondition"""
+         if "preconditions" not in self._step_config:
+             self._step_config["preconditions"] = []
+         self._step_config["preconditions"].append(
+             Precondition(condition=condition, expected=expected)
+         )
+         return self
+
+     def build(self) -> Step:
+         """Build and return the final Step model"""
+         return Step(**self._step_config)
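The two builders above compose: StepBuilder produces Step models that DagBuilder.add_step_models attaches alongside steps declared inline. A minimal sketch using only the methods shown above (the DAG name, commands, and image are illustrative):

    from pydagu.builder import DagBuilder, StepBuilder

    # Pre-build a step with a retry policy and a Docker executor.
    extract = (StepBuilder("extract")
        .command("python extract.py")
        .retry(limit=3, interval=60)
        .docker_executor(image="python:3.11")
        .build())

    # Assemble the DAG, attach the pre-built step, then add a dependent inline step.
    dag_yaml = (DagBuilder("etl", description="Example ETL pipeline")
        .schedule("0 2 * * *")
        .add_tag("example")
        .add_step_models(extract)
        .add_step(name="transform", command="python transform.py", depends="extract")
        .to_yaml())

save("etl.yaml") writes the same serialized spec to a file, and to_dict() returns the plain dictionary form.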
pydagu/http.py ADDED
@@ -0,0 +1,88 @@
+ """Client helpers for calling Dagu's HTTP API."""
+ import re
+
+ import httpx
+ from yaml import dump, Dumper, safe_load
+
+ from .models import Dag, StartDagRun, DagRunId, DagResponseMessage, DagRunResult
+
+
+ url_pattern = re.compile(r"^https?://[^\s/$.?#].[^\s]*$")
+
+
+ class DaguHttpClient:
+     def __init__(self, dag_name: str, url_root: str) -> None:
+         self.dag_name = dag_name
+         self.url_root = url_root.strip().rstrip("/")
+         if not url_pattern.match(self.url_root):
+             raise ValueError(f"Invalid URL root: {self.url_root}")
+
+     def get_dag_spec(self) -> Dag:
+         """Fetch a DAG from the Dagu HTTP API by its name."""
+         url = f"{self.url_root}/dags/{self.dag_name}/spec"
+         response = httpx.get(url)
+         response.raise_for_status()
+         dag_data = response.json()
+         dag_yaml = dag_data["spec"]
+         dag_dict = safe_load(dag_yaml)
+         return Dag.model_validate(dag_dict)
+
+     def post_dag(self, dag: Dag) -> None | DagResponseMessage:
+         """Post a DAG to the Dagu HTTP API"""
+         url = f"{self.url_root}/dags"
+
+         dagu_dict = dag.model_dump(exclude_unset=True, exclude_none=True)
+         dag_yaml = dump(dagu_dict, Dumper=Dumper)
+
+         body_json = {
+             "name": self.dag_name,
+             "spec": dag_yaml,
+         }
+         response = httpx.post(url, json=body_json)
+
+         if response.status_code in (400, 409):
+             return DagResponseMessage.model_validate(response.json())
+
+         response.raise_for_status()
+         return None
+
+     def delete_dag(self) -> None:
+         """Delete a DAG from the Dagu HTTP API by its name."""
+         url = f"{self.url_root}/dags/{self.dag_name}"
+         httpx.delete(url).raise_for_status()
+
+     def start_dag_run(
+         self, start_request: StartDagRun
+     ) -> DagRunId | DagResponseMessage:
+         """
+         Start a DAG run via the Dagu HTTP API.
+
+         Returns DagRunId unless StartDagRun.singleton is True and the DAG is already
+         running, in which case it returns DagResponseMessage.
+         """
+         url = f"{self.url_root}/dags/{self.dag_name}/start"
+         response = httpx.post(url, json=start_request.model_dump())
+         # Do not raise immediately: a 409 (already running) is handled below.
+
+         status_code = response.status_code
+         if status_code in (200, 409):
+             dag_run_data = response.json()
+             if status_code == 409:
+                 return DagResponseMessage.model_validate(dag_run_data)
+             else:
+                 return DagRunId.model_validate(dag_run_data)
+
+         response.raise_for_status()
+         raise httpx.HTTPError(f"Unexpected status code: {status_code}")
+
+     def get_dag_run_status(self, dag_run_id: str) -> DagRunResult:
+         """
+         Get the status of a DAG run via the Dagu HTTP API.
+
+         dag_run_id: The ID of the DAG run to fetch or "latest" for the most recent run.
+         """
+         url = f"{self.url_root}/dag-runs/{self.dag_name}/{dag_run_id}"
+         response = httpx.get(url)
+         response.raise_for_status()
+         dag_run_data = response.json()
+         return DagRunResult.model_validate(dag_run_data["dagRunDetails"])
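DaguHttpClient wraps the spec and run endpoints shown above. A minimal sketch of pushing a spec and reading it back; the server address is an assumption about where a Dagu instance exposes its API, and the DAG itself is illustrative:

    from pydagu.builder import DagBuilder
    from pydagu.http import DaguHttpClient
    from pydagu.models import DagResponseMessage

    dag = DagBuilder("etl").add_step(name="extract", command="python extract.py").build()

    # The URL must satisfy url_pattern; this address is illustrative.
    client = DaguHttpClient(dag_name="etl", url_root="http://localhost:8080/api/v2")

    error = client.post_dag(dag)  # None on success, DagResponseMessage on 400/409
    if isinstance(error, DagResponseMessage):
        print(f"Dagu rejected the spec: {error}")
    else:
        fetched = client.get_dag_spec()  # round-trips the stored spec into a Dag model

start_dag_run and get_dag_run_status follow the same pattern once a StartDagRun request model has been constructed.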
pydagu/models/__init__.py ADDED
@@ -0,0 +1,59 @@
+ """Pydantic models for Dagu DAG validation"""
+
+ from pydagu.models.dag import Dag
+ from pydagu.models.base import Precondition
+ from pydagu.models.step import (
+     Step,
+     RetryPolicy,
+     ContinueOn,
+     ParallelConfig,
+ )
+ from pydagu.models.executor import (
+     ExecutorConfig,
+     HTTPExecutorConfig,
+     SSHExecutorConfig,
+     MailExecutorConfig,
+     DockerExecutorConfig,
+     JQExecutorConfig,
+     ShellExecutorConfig,
+ )
+ from pydagu.models.handlers import HandlerConfig, HandlerOn
+ from pydagu.models.notifications import MailOn, SMTPConfig
+ from pydagu.models.infrastructure import ContainerConfig, SSHConfig, LogConfig
+ from pydagu.models.request import StartDagRun
+ from pydagu.models.response import DagRunId, DagResponseMessage, DagRunResult
+
+ __all__ = [
+     # Main DAG
+     "Dag",
+     # Step related
+     "Step",
+     "Precondition",
+     "RetryPolicy",
+     "ContinueOn",
+     "ParallelConfig",
+     # Executors
+     "ExecutorConfig",
+     "HTTPExecutorConfig",
+     "SSHExecutorConfig",
+     "MailExecutorConfig",
+     "DockerExecutorConfig",
+     "JQExecutorConfig",
+     "ShellExecutorConfig",
+     # Handlers
+     "HandlerConfig",
+     "HandlerOn",
+     # Notifications
+     "MailOn",
+     "SMTPConfig",
+     # Infrastructure
+     "ContainerConfig",
+     "SSHConfig",
+     "LogConfig",
+     # Requests
+     "StartDagRun",
+     # Responses
+     "DagRunId",
+     "DagResponseMessage",
+     "DagRunResult",
+ ]
pydagu/models/base.py ADDED
@@ -0,0 +1,12 @@
+ """Base models and common types"""
+
+ from pydantic import BaseModel, Field
+
+
+ class Precondition(BaseModel):
+     """Precondition that must be met before DAG execution"""
+
+     condition: str = Field(
+         examples=["`date +%u`", "test -f /data/ready.flag", "$STATUS"]
+     )
+     expected: str = Field(examples=["re:[1-5]", "0", "success"])
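To close the loop with the builder API, the Field examples above translate directly into a day-of-week guard. The "re:" prefix follows Dagu's convention of matching the expected value as a regular expression (a behaviour of Dagu itself, not enforced by this model), and the DAG below is invented for the example:

    from pydagu.builder import DagBuilder

    # `date +%u` prints 1-7 (Monday-Sunday); "re:[1-5]" accepts weekdays only.
    dag = (DagBuilder("weekday-report")
        .add_precondition(condition="`date +%u`", expected="re:[1-5]")
        .add_step(name="report", command="python report.py")
        .build())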