ddeutil-workflow 0.0.36__py3-none-any.whl → 0.0.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/__about__.py CHANGED
@@ -1 +1 @@
- __version__: str = "0.0.36"
+ __version__: str = "0.0.37"
ddeutil/workflow/api/api.py CHANGED
@@ -27,7 +27,7 @@ from .repeat import repeat_at
  from .routes import job, log
 
  load_dotenv()
- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")
 
 
  class State(TypedDict):
@@ -151,6 +151,7 @@ if config.enable_route_schedule:
  async def validation_exception_handler(
      request: Request, exc: RequestValidationError
  ):
+     _ = request
      return UJSONResponse(
          status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
          content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
@@ -164,4 +165,5 @@ if __name__ == "__main__":
          app,
          host="0.0.0.0",
          port=80,
+         log_level="DEBUG",
      )
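
A note on the logger rename above: asking logging for the name "uvicorn.error" returns the very logger object that uvicorn configures for its own server messages, so application records share uvicorn's handlers and formatting instead of flowing through a separate "ddeutil.workflow" logger. A minimal standalone sketch of the effect (not code from the package):

    import logging

    # Under a running uvicorn server, this is uvicorn's own error logger, so
    # records emitted here are rendered by uvicorn's handlers and level.
    logger = logging.getLogger("uvicorn.error")
    logger.info("workflow API event")
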
ddeutil/workflow/api/log.py ADDED
@@ -0,0 +1,59 @@
+ from ..conf import config
+
+ LOGGING_CONFIG = { # pragma: no cov
+     "version": 1,
+     "disable_existing_loggers": False,
+     "formatters": {
+         "standard": {
+             "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
+         },
+         "custom_formatter": {
+             "format": config.log_format,
+             "datefmt": config.log_datetime_format,
+         },
+     },
+     "root": {
+         "level": "DEBUG" if config.debug else "INFO",
+     },
+     "handlers": {
+         "default": {
+             "formatter": "standard",
+             "class": "logging.StreamHandler",
+             "stream": "ext://sys.stderr",
+         },
+         "stream_handler": {
+             "formatter": "custom_formatter",
+             "class": "logging.StreamHandler",
+             "stream": "ext://sys.stdout",
+         },
+         "file_handler": {
+             "formatter": "custom_formatter",
+             "class": "logging.handlers.RotatingFileHandler",
+             "filename": "logs/app.log",
+             "maxBytes": 1024 * 1024 * 1,
+             "backupCount": 3,
+         },
+     },
+     "loggers": {
+         "uvicorn": {
+             "handlers": ["default", "file_handler"],
+             "level": "DEBUG" if config.debug else "INFO",
+             "propagate": False,
+         },
+         "uvicorn.access": {
+             "handlers": ["stream_handler", "file_handler"],
+             "level": "DEBUG" if config.debug else "INFO",
+             "propagate": False,
+         },
+         "uvicorn.error": {
+             "handlers": ["stream_handler", "file_handler"],
+             "level": "DEBUG" if config.debug else "INFO",
+             "propagate": False,
+         },
+         # "uvicorn.asgi": {
+         #     "handlers": ["stream_handler", "file_handler"],
+         #     "level": "TRACE",
+         #     "propagate": False,
+         # },
+     },
+ }
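
A usage sketch for the new dictConfig mapping (assumptions: the module path ddeutil.workflow.api.log is taken from the new RECORD below, the app import string mirrors api.py, and the relative logs/ directory must exist before the RotatingFileHandler is instantiated):

    import logging.config
    from pathlib import Path

    import uvicorn

    from ddeutil.workflow.api.log import LOGGING_CONFIG

    # file_handler writes to logs/app.log, so create the directory first.
    Path("logs").mkdir(exist_ok=True)

    logging.config.dictConfig(LOGGING_CONFIG)

    # uvicorn also accepts the same mapping directly via log_config.
    uvicorn.run(
        "ddeutil.workflow.api.api:app",
        host="0.0.0.0",
        port=80,
        log_config=LOGGING_CONFIG,
    )
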
ddeutil/workflow/api/repeat.py CHANGED
@@ -15,7 +15,7 @@ from starlette.concurrency import run_in_threadpool
  from ..__cron import CronJob
  from ..conf import config, get_logger
 
- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")
 
 
  def get_cronjob_delta(cron: str) -> float:
ddeutil/workflow/api/routes/job.py CHANGED
@@ -17,7 +17,7 @@ from ...exceptions import JobException
  from ...job import Job
  from ...result import Result
 
- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")
 
 
  job_route = APIRouter(
ddeutil/workflow/api/routes/logs.py CHANGED
@@ -7,6 +7,7 @@
  from __future__ import annotations
 
  from fastapi import APIRouter
+ from fastapi import status as st
  from fastapi.responses import UJSONResponse
 
  from ...audit import get_audit
@@ -14,47 +15,116 @@ from ...logs import get_trace_obj
 
  log_route = APIRouter(
      prefix="/logs",
-     tags=["logs", "trace", "audit"],
+     tags=["logs"],
      default_response_class=UJSONResponse,
  )
 
 
- @log_route.get(path="/trace/")
+ @log_route.get(
+     path="/traces/",
+     response_class=UJSONResponse,
+     status_code=st.HTTP_200_OK,
+     summary="Read all trace logs.",
+     tags=["trace"],
+ )
  async def get_traces():
-     """Get all trace logs."""
+     """Return all trace logs from the current trace log path that config with
+     `WORKFLOW_LOG_PATH` environment variable name.
+     """
      return {
          "message": "Getting trace logs",
-         "traces": list(get_trace_obj().find_logs()),
+         "traces": [
+             trace.model_dump(
+                 by_alias=True,
+                 exclude_none=True,
+                 exclude_unset=True,
+                 exclude_defaults=True,
+             )
+             for trace in get_trace_obj().find_logs()
+         ],
      }
 
 
- @log_route.get(path="/trace/{run_id}")
+ @log_route.get(
+     path="/traces/{run_id}",
+     response_class=UJSONResponse,
+     status_code=st.HTTP_200_OK,
+     summary="Read trace log with specific running ID.",
+     tags=["trace"],
+ )
  async def get_trace_with_id(run_id: str):
-     """Get trace log with specific running ID."""
-     return get_trace_obj().find_log_with_id(run_id)
+     """Return trace log with specific running ID from the current trace log path
+     that config with `WORKFLOW_LOG_PATH` environment variable name.
+
+     - **run_id**: A running ID that want to search a trace log from the log
+         path.
+     """
+     return {
+         "message": f"Getting trace log with specific running ID: {run_id}",
+         "trace": (
+             get_trace_obj()
+             .find_log_with_id(run_id)
+             .model_dump(
+                 by_alias=True,
+                 exclude_none=True,
+                 exclude_unset=True,
+                 exclude_defaults=True,
+             )
+         ),
+     }
 
 
- @log_route.get(path="/audit/")
+ @log_route.get(
+     path="/audits/",
+     response_class=UJSONResponse,
+     status_code=st.HTTP_200_OK,
+     summary="Read all audit logs.",
+     tags=["audit"],
+ )
  async def get_audits():
-     """Get all audit logs."""
+     """Return all audit logs from the current audit log path that config with
+     `WORKFLOW_AUDIT_PATH` environment variable name.
+     """
      return {
          "message": "Getting audit logs",
          "audits": list(get_audit().find_audits(name="demo")),
      }
 
 
- @log_route.get(path="/audit/{workflow}/")
+ @log_route.get(
+     path="/audits/{workflow}/",
+     response_class=UJSONResponse,
+     status_code=st.HTTP_200_OK,
+     summary="Read all audit logs with specific workflow name.",
+     tags=["audit"],
+ )
  async def get_audit_with_workflow(workflow: str):
-     """Get all audit logs."""
+     """Return all audit logs with specific workflow name from the current audit
+     log path that config with `WORKFLOW_AUDIT_PATH` environment variable name.
+
+     - **workflow**: A specific workflow name that want to find audit logs.
+     """
      return {
          "message": f"Getting audit logs with workflow name {workflow}",
          "audits": list(get_audit().find_audits(name="demo")),
      }
 
 
- @log_route.get(path="/audit/{workflow}/{release}")
+ @log_route.get(
+     path="/audits/{workflow}/{release}",
+     response_class=UJSONResponse,
+     status_code=st.HTTP_200_OK,
+     summary="Read all audit logs with specific workflow name and release date.",
+     tags=["audit"],
+ )
  async def get_audit_with_workflow_release(workflow: str, release: str):
-     """Get all audit logs."""
+     """Return all audit logs with specific workflow name and release date from
+     the current audit log path that config with `WORKFLOW_AUDIT_PATH`
+     environment variable name.
+
+     - **workflow**: A specific workflow name that want to find audit logs.
+     - **release**: A release date with a string format `%Y%m%d%H%M%S`.
+     """
      return {
          "message": (
              f"Getting audit logs with workflow name {workflow} and release "
@@ -62,3 +132,34 @@ async def get_audit_with_workflow_release(workflow: str, release: str):
          ),
          "audits": list(get_audit().find_audits(name="demo")),
      }
+
+
+ @log_route.get(
+     path="/audits/{workflow}/{release}/{run_id}",
+     response_class=UJSONResponse,
+     status_code=st.HTTP_200_OK,
+     summary=(
+         "Read all audit logs with specific workflow name, release date "
+         "and running ID."
+     ),
+     tags=["audit"],
+ )
+ async def get_audit_with_workflow_release_run_id(
+     workflow: str, release: str, run_id: str
+ ):
+     """Return all audit logs with specific workflow name and release date from
+     the current audit log path that config with `WORKFLOW_AUDIT_PATH`
+     environment variable name.
+
+     - **workflow**: A specific workflow name that want to find audit logs.
+     - **release**: A release date with a string format `%Y%m%d%H%M%S`.
+     - **run_id**: A running ID that want to search audit log from this release
+         date.
+     """
+     return {
+         "message": (
+             f"Getting audit logs with workflow name {workflow}, release "
+             f"{release}, and running ID {run_id}"
+         ),
+         "audits": list(get_audit().find_audits(name="demo")),
+     }
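
A hedged client-side sketch of the renamed read endpoints above (the singular /trace/ and /audit/ paths became plural; the host, port, and run ID here are placeholders, not values from this diff):

    import httpx

    BASE = "http://localhost:80"  # assumes the server from api.py's __main__ block
    RUN_ID = "20250101120000000000T0123456789"  # hypothetical running ID

    print(httpx.get(f"{BASE}/logs/traces/").json())          # was /logs/trace/
    print(httpx.get(f"{BASE}/logs/traces/{RUN_ID}").json())  # was /logs/trace/{run_id}
    print(httpx.get(f"{BASE}/logs/audits/").json())          # was /logs/audit/
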
ddeutil/workflow/api/routes/schedules.py CHANGED
@@ -15,7 +15,7 @@ from fastapi.responses import UJSONResponse
  from ...conf import config, get_logger
  from ...scheduler import Schedule
 
- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")
 
  schedule_route = APIRouter(
      prefix="/schedules",
@@ -24,7 +24,7 @@ schedule_route = APIRouter(
  )
 
 
- @schedule_route.get(path="/{name}")
+ @schedule_route.get(path="/{name}", status_code=st.HTTP_200_OK)
  async def get_schedules(name: str):
      """Get schedule object."""
      try:
@@ -42,13 +42,13 @@ async def get_schedules(name: str):
      )
 
 
- @schedule_route.get(path="/deploy/")
+ @schedule_route.get(path="/deploy/", status_code=st.HTTP_200_OK)
  async def get_deploy_schedulers(request: Request):
      snapshot = copy.deepcopy(request.state.scheduler)
      return {"schedule": snapshot}
 
 
- @schedule_route.get(path="/deploy/{name}")
+ @schedule_route.get(path="/deploy/{name}", status_code=st.HTTP_200_OK)
  async def get_deploy_scheduler(request: Request, name: str):
      if name in request.state.scheduler:
          schedule = Schedule.from_loader(name)
@@ -76,7 +76,7 @@ async def get_deploy_scheduler(request: Request, name: str):
      )
 
 
- @schedule_route.post(path="/deploy/{name}")
+ @schedule_route.post(path="/deploy/{name}", status_code=st.HTTP_202_ACCEPTED)
  async def add_deploy_scheduler(request: Request, name: str):
      """Adding schedule name to application state store."""
      if name in request.state.scheduler:
@@ -116,7 +116,7 @@ async def add_deploy_scheduler(request: Request, name: str):
      }
 
 
- @schedule_route.delete(path="/deploy/{name}")
+ @schedule_route.delete(path="/deploy/{name}", status_code=st.HTTP_202_ACCEPTED)
  async def del_deploy_scheduler(request: Request, name: str):
      """Delete workflow task on the schedule listener."""
      if name in request.state.scheduler:
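
The status_code arguments added above only change the advertised response codes: reads declare 200 OK, while deploying or deleting a schedule declares 202 Accepted. A quick sketch (host and schedule name are hypothetical):

    import httpx

    r = httpx.post("http://localhost:80/schedules/deploy/schedule-demo")
    assert r.status_code == 202  # st.HTTP_202_ACCEPTED per the new decorator
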
ddeutil/workflow/api/routes/workflows.py CHANGED
@@ -20,7 +20,7 @@ from ...conf import Loader, get_logger
  from ...result import Result
  from ...workflow import Workflow
 
- logger = get_logger("ddeutil.workflow")
+ logger = get_logger("uvicorn.error")
 
  workflow_route = APIRouter(
      prefix="/workflows",
@@ -29,7 +29,7 @@ workflow_route = APIRouter(
  )
 
 
- @workflow_route.get(path="/")
+ @workflow_route.get(path="/", status_code=st.HTTP_200_OK)
  async def get_workflows() -> DictData:
      """Return all workflow workflows that exists in config path."""
      workflows: DictData = dict(Loader.finds(Workflow))
@@ -40,7 +40,7 @@ async def get_workflows() -> DictData:
      }
 
 
- @workflow_route.get(path="/{name}")
+ @workflow_route.get(path="/{name}", status_code=st.HTTP_200_OK)
  async def get_workflow_by_name(name: str) -> DictData:
      """Return model of workflow that passing an input workflow name."""
      try:
@@ -66,7 +66,7 @@ class ExecutePayload(BaseModel):
 
 
  @workflow_route.post(path="/{name}/execute", status_code=st.HTTP_202_ACCEPTED)
- async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
+ async def workflow_execute(name: str, payload: ExecutePayload) -> DictData:
      """Return model of workflow that passing an input workflow name."""
      try:
          workflow: Workflow = Workflow.from_loader(name=name, externals={})
@@ -90,7 +90,7 @@ async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
      return asdict(result)
 
 
- @workflow_route.get(path="/{name}/audits")
+ @workflow_route.get(path="/{name}/audits", status_code=st.HTTP_200_OK)
  async def get_workflow_audits(name: str):
      try:
          return {
@@ -112,11 +112,13 @@ async def get_workflow_audits(name: str):
          ) from None
 
 
- @workflow_route.get(path="/{name}/audits/{release}")
+ @workflow_route.get(path="/{name}/audits/{release}", status_code=st.HTTP_200_OK)
  async def get_workflow_release_audit(name: str, release: str):
+     """Get Workflow audit log with an input release value."""
      try:
          audit: Audit = get_audit().find_audit_with_release(
-             name=name, release=datetime.strptime(release, "%Y%m%d%H%M%S")
+             name=name,
+             release=datetime.strptime(release, "%Y%m%d%H%M%S"),
          )
      except FileNotFoundError:
          raise HTTPException(
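
The rename above (execute_workflow to workflow_execute) only changes the handler's Python name; the POST /workflows/{name}/execute route and its 202 status are unchanged. A hedged call sketch (the ExecutePayload body is not shown in this diff, so the JSON shape is an assumption):

    import httpx

    resp = httpx.post(
        "http://localhost:80/workflows/wf-demo/execute",  # wf-demo is hypothetical
        json={"params": {"name": "demo"}},  # assumed ExecutePayload shape
    )
    assert resp.status_code == 202
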
ddeutil/workflow/caller.py CHANGED
@@ -91,7 +91,9 @@ def make_registry(submodule: str) -> dict[str, Registry]:
      for fstr, func in inspect.getmembers(importer, inspect.isfunction):
          # NOTE: check function attribute that already set tag by
          #   ``utils.tag`` decorator.
-         if not (hasattr(func, "tag") and hasattr(func, "name")):
+         if not (
+             hasattr(func, "tag") and hasattr(func, "name")
+         ): # pragma: no cov
              continue
 
          # NOTE: Define type of the func value.
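
For context on the guard above: make_registry keeps only functions that the package's ``utils.tag`` decorator has stamped with both a tag and a name attribute. A self-contained sketch of that contract (this decorator is illustrative, not the package's implementation):

    import inspect
    import sys


    def tag(name: str, alias: str | None = None):
        """Illustrative stand-in for the package's ``utils.tag`` decorator."""

        def wrapper(func):
            func.tag = name
            func.name = alias or func.__name__
            return func

        return wrapper


    @tag("demo", alias="hello-world")
    def hello() -> str:
        return "hi"


    def untagged() -> None: ...


    # Mirrors the guard in make_registry: untagged callables are skipped.
    registry = {
        func.name: func
        for _, func in inspect.getmembers(sys.modules[__name__], inspect.isfunction)
        if hasattr(func, "tag") and hasattr(func, "name")
    }
    assert "hello-world" in registry and "untagged" not in registry
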
ddeutil/workflow/conf.py CHANGED
@@ -31,7 +31,6 @@ def glob_files(path: Path) -> Iterator[Path]: # pragma: no cov
 
 
  __all__: TupleStr = (
-     "LOGGING_CONFIG",
      "env",
      "get_logger",
      "Config",
@@ -422,62 +421,3 @@ def get_logger(name: str):
 
      logger.setLevel(logging.DEBUG if config.debug else logging.INFO)
      return logger
-
-
- LOGGING_CONFIG = { # pragma: no cov
-     "version": 1,
-     "disable_existing_loggers": False,
-     "formatters": {
-         "standard": {
-             "format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
-         },
-         "custom_formatter": {
-             "format": config.log_format,
-             "datefmt": config.log_datetime_format,
-         },
-     },
-     "root": {
-         "level": "DEBUG" if config.debug else "INFO",
-     },
-     "handlers": {
-         "default": {
-             "formatter": "standard",
-             "class": "logging.StreamHandler",
-             "stream": "ext://sys.stderr",
-         },
-         "stream_handler": {
-             "formatter": "custom_formatter",
-             "class": "logging.StreamHandler",
-             "stream": "ext://sys.stdout",
-         },
-         "file_handler": {
-             "formatter": "custom_formatter",
-             "class": "logging.handlers.RotatingFileHandler",
-             "filename": "logs/app.log",
-             "maxBytes": 1024 * 1024 * 1,
-             "backupCount": 3,
-         },
-     },
-     "loggers": {
-         "uvicorn": {
-             "handlers": ["default", "file_handler"],
-             "level": "DEBUG" if config.debug else "INFO",
-             "propagate": False,
-         },
-         "uvicorn.access": {
-             "handlers": ["stream_handler", "file_handler"],
-             "level": "DEBUG" if config.debug else "INFO",
-             "propagate": False,
-         },
-         "uvicorn.error": {
-             "handlers": ["stream_handler", "file_handler"],
-             "level": "DEBUG" if config.debug else "INFO",
-             "propagate": False,
-         },
-         # "uvicorn.asgi": {
-         #     "handlers": ["stream_handler", "file_handler"],
-         #     "level": "TRACE",
-         #     "propagate": False,
-         # },
-     },
- }
ddeutil/workflow/job.py CHANGED
@@ -60,6 +60,8 @@ __all__: TupleStr = (
      "RunsOnSelfHosted",
      "RunsOnK8s",
      "make",
+     "local_execute_strategy",
+     "local_execute",
  )
 
 
@@ -228,6 +230,10 @@ class RunsOnType(str, Enum):
 
 
  class BaseRunsOn(BaseModel): # pragma: no cov
+     """Base Runs-On Model for generate runs-on types via inherit this model
+     object and override execute method.
+     """
+
      model_config = ConfigDict(use_enum_values=True)
 
      type: Literal[RunsOnType.LOCAL]
@@ -243,12 +249,17 @@ class RunsOnLocal(BaseRunsOn): # pragma: no cov
      type: Literal[RunsOnType.LOCAL] = Field(default=RunsOnType.LOCAL)
 
 
+ class SelfHostedArgs(BaseModel):
+     host: str
+
+
  class RunsOnSelfHosted(BaseRunsOn): # pragma: no cov
      """Runs-on self-hosted."""
 
      type: Literal[RunsOnType.SELF_HOSTED] = Field(
          default=RunsOnType.SELF_HOSTED
      )
+     args: SelfHostedArgs = Field(alias="with")
 
 
  class RunsOnK8s(BaseRunsOn): # pragma: no cov
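
A validation sketch for the new args field on RunsOnSelfHosted (both the "self_hosted" enum value and the dict shape are assumptions inferred from alias="with"; the RunsOnType values are not shown in this diff):

    from ddeutil.workflow.job import RunsOnSelfHosted

    runs_on = RunsOnSelfHosted.model_validate(
        {
            "type": "self_hosted",  # assumed RunsOnType.SELF_HOSTED value
            "with": {"host": "http://10.0.0.5:8000"},  # fills args via alias="with"
        }
    )
    print(runs_on.args.host)
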
@@ -454,271 +465,314 @@ class Job(BaseModel):
          )
          return to
 
-     def execute_strategy(
+     def execute(
          self,
-         strategy: DictData,
          params: DictData,
          *,
+         run_id: str | None = None,
+         parent_run_id: str | None = None,
          result: Result | None = None,
-         event: Event | None = None,
      ) -> Result:
-         """Job Strategy execution with passing dynamic parameters from the
-         workflow execution to strategy matrix.
-
-         This execution is the minimum level of execution of this job model.
-         It different with `self.execute` because this method run only one
-         strategy and return with context of this strategy data.
-
-         The result of this execution will return result with strategy ID
-         that generated from the `gen_id` function with an input strategy value.
-
-         :raise JobException: If it has any error from `StageException` or
-             `UtilException`.
+         """Job execution with passing dynamic parameters from the workflow
+         execution. It will generate matrix values at the first step and run
+         multithread on this metrics to the `stages` field of this job.
 
-         :param strategy: A strategy metrix value that use on this execution.
-             This value will pass to the `matrix` key for templating.
-         :param params: A dynamic parameters that will deepcopy to the context.
+         :param params: An input parameters that use on job execution.
+         :param run_id: A job running ID for this execution.
+         :param parent_run_id: A parent workflow running ID for this release.
          :param result: (Result) A result object for keeping context and status
              data.
-         :param event: An event manager that pass to the PoolThreadExecutor.
 
          :rtype: Result
          """
          if result is None: # pragma: no cov
-             result: Result = Result(run_id=gen_id(self.id or "", unique=True))
-
-         strategy_id: str = gen_id(strategy)
+             result: Result = Result(
+                 run_id=(run_id or gen_id(self.id or "", unique=True)),
+                 parent_run_id=parent_run_id,
+             )
+         elif parent_run_id: # pragma: no cov
+             result.set_parent_run_id(parent_run_id)
 
-         # PARAGRAPH:
-         #
-         #     Create strategy execution context and update a matrix and copied
-         #   of params. So, the context value will have structure like;
-         #
-         #     {
-         #         "params": { ... },   <== Current input params
-         #         "jobs": { ... },   <== Current input params
-         #         "matrix": { ... }   <== Current strategy value
-         #         "stages": { ... }   <== Catching stage outputs
-         #     }
-         #
-         context: DictData = copy.deepcopy(params)
-         context.update({"matrix": strategy, "stages": {}})
+         if self.runs_on.type == RunsOnType.LOCAL:
+             return local_execute(
+                 job=self,
+                 params=params,
+                 result=result,
+             )
+         raise NotImplementedError(
+             f"The job runs-on other type: {self.runs_on.type} does not "
+             f"support yet."
+         )
 
-         # IMPORTANT: The stage execution only run sequentially one-by-one.
-         for stage in self.stages:
 
-             if stage.is_skipped(params=context):
-                 result.trace.info(f"[JOB]: Skip stage: {stage.iden!r}")
-                 continue
+ def local_execute_strategy(
+     job: Job,
+     strategy: DictData,
+     params: DictData,
+     *,
+     result: Result | None = None,
+     event: Event | None = None,
+ ) -> Result:
+     """Local job strategy execution with passing dynamic parameters from the
+     workflow execution to strategy matrix.
+
+     This execution is the minimum level of execution of this job model.
+     It different with `self.execute` because this method run only one
+     strategy and return with context of this strategy data.
+
+     The result of this execution will return result with strategy ID
+     that generated from the `gen_id` function with an input strategy value.
+
+     :raise JobException: If it has any error from `StageException` or
+         `UtilException`.
+
+     :param job: (Job) A job model that want to execute.
+     :param strategy: A strategy metrix value that use on this execution.
+         This value will pass to the `matrix` key for templating.
+     :param params: A dynamic parameters that will deepcopy to the context.
+     :param result: (Result) A result object for keeping context and status
+         data.
+     :param event: (Event) An event manager that pass to the PoolThreadExecutor.
+
+     :rtype: Result
+     """
+     if result is None: # pragma: no cov
+         result: Result = Result(run_id=gen_id(job.id or "", unique=True))
+
+     strategy_id: str = gen_id(strategy)
+
+     # PARAGRAPH:
+     #
+     #     Create strategy execution context and update a matrix and copied
+     #   of params. So, the context value will have structure like;
+     #
+     #     {
+     #         "params": { ... },   <== Current input params
+     #         "jobs": { ... },   <== Current input params
+     #         "matrix": { ... }   <== Current strategy value
+     #         "stages": { ... }   <== Catching stage outputs
+     #     }
+     #
+     context: DictData = copy.deepcopy(params)
+     context.update({"matrix": strategy, "stages": {}})
+
+     # IMPORTANT: The stage execution only run sequentially one-by-one.
+     for stage in job.stages:
+
+         if stage.is_skipped(params=context):
+             result.trace.info(f"[JOB]: Skip stage: {stage.iden!r}")
+             continue
 
-             result.trace.info(f"[JOB]: Execute stage: {stage.iden!r}")
+         result.trace.info(f"[JOB]: Execute stage: {stage.iden!r}")
 
-             # NOTE: Logging a matrix that pass on this stage execution.
-             if strategy:
-                 result.trace.info(f"[JOB]: ... Matrix: {strategy}")
+         # NOTE: Logging a matrix that pass on this stage execution.
+         if strategy:
+             result.trace.info(f"[JOB]: ... Matrix: {strategy}")
 
-             # NOTE: Force stop this execution if event was set from main
-             #   execution.
-             if event and event.is_set():
-                 error_msg: str = (
-                     "Job strategy was canceled from event that had set before "
-                     "strategy execution."
-                 )
-                 return result.catch(
-                     status=1,
-                     context={
-                         strategy_id: {
-                             "matrix": strategy,
-                             # NOTE: If job strategy executor use multithreading,
-                             #   it will not filter function object from context.
-                             # ---
-                             # "stages": filter_func(context.pop("stages", {})),
-                             #
-                             "stages": context.pop("stages", {}),
-                             "errors": {
-                                 "class": JobException(error_msg),
-                                 "name": "JobException",
-                                 "message": error_msg,
-                             },
+         # NOTE: Force stop this execution if event was set from main
+         #   execution.
+         if event and event.is_set():
+             error_msg: str = (
+                 "Job strategy was canceled from event that had set before "
+                 "strategy execution."
+             )
+             return result.catch(
+                 status=1,
+                 context={
+                     strategy_id: {
+                         "matrix": strategy,
+                         # NOTE: If job strategy executor use multithreading,
+                         #   it will not filter function object from context.
+                         # ---
+                         # "stages": filter_func(context.pop("stages", {})),
+                         #
+                         "stages": context.pop("stages", {}),
+                         "errors": {
+                             "class": JobException(error_msg),
+                             "name": "JobException",
+                             "message": error_msg,
                          },
                      },
-                 )
+                 },
+             )
 
-             # PARAGRAPH:
-             #
-             #       I do not use below syntax because `params` dict be the
-             #   reference memory pointer, and it was changed when I action
-             #   anything like update or re-construct this.
-             #
-             #       ... params |= stage.execute(params=params)
-             #
-             #   This step will add the stage result to `stages` key in
-             #   that stage id. It will have structure like;
-             #
-             #   {
-             #       "params": { ... },
-             #       "jobs": { ... },
-             #       "matrix": { ... },
-             #       "stages": { { "stage-id-1": ... }, ... }
-             #   }
-             #
-             # IMPORTANT:
-             #   This execution change all stage running IDs to the current job
-             #   running ID, but it still trac log to the same parent running ID
-             #   (with passing `run_id` and `parent_run_id` to the stage
-             #   execution arguments).
-             #
-             try:
-                 stage.set_outputs(
-                     stage.handler_execute(
-                         params=context,
-                         run_id=result.run_id,
-                         parent_run_id=result.parent_run_id,
-                     ).context,
-                     to=context,
-                 )
-             except (StageException, UtilException) as err:
-                 result.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
-                 if config.job_raise_error:
-                     raise JobException(
-                         f"Stage execution error: {err.__class__.__name__}: "
-                         f"{err}"
-                     ) from None
-
-                 return result.catch(
-                     status=1,
-                     context={
-                         strategy_id: {
-                             "matrix": strategy,
-                             "stages": context.pop("stages", {}),
-                             "errors": {
-                                 "class": err,
-                                 "name": err.__class__.__name__,
-                                 "message": f"{err.__class__.__name__}: {err}",
-                             },
+         # PARAGRAPH:
+         #
+         #       I do not use below syntax because `params` dict be the
+         #   reference memory pointer, and it was changed when I action
+         #   anything like update or re-construct this.
+         #
+         #       ... params |= stage.execute(params=params)
+         #
+         #   This step will add the stage result to `stages` key in
+         #   that stage id. It will have structure like;
+         #
+         #   {
+         #       "params": { ... },
+         #       "jobs": { ... },
+         #       "matrix": { ... },
+         #       "stages": { { "stage-id-1": ... }, ... }
+         #   }
+         #
+         # IMPORTANT:
+         #   This execution change all stage running IDs to the current job
+         #   running ID, but it still trac log to the same parent running ID
+         #   (with passing `run_id` and `parent_run_id` to the stage
+         #   execution arguments).
+         #
+         try:
+             stage.set_outputs(
+                 stage.handler_execute(
+                     params=context,
+                     run_id=result.run_id,
+                     parent_run_id=result.parent_run_id,
+                 ).context,
+                 to=context,
+             )
+         except (StageException, UtilException) as err:
+             result.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
+             if config.job_raise_error:
+                 raise JobException(
+                     f"Stage execution error: {err.__class__.__name__}: "
+                     f"{err}"
+                 ) from None
+
+             return result.catch(
+                 status=1,
+                 context={
+                     strategy_id: {
+                         "matrix": strategy,
+                         "stages": context.pop("stages", {}),
+                         "errors": {
+                             "class": err,
+                             "name": err.__class__.__name__,
+                             "message": f"{err.__class__.__name__}: {err}",
                          },
                      },
-                 )
+                 },
+             )
 
-             # NOTE: Remove the current stage object for saving memory.
-             del stage
+         # NOTE: Remove the current stage object for saving memory.
+         del stage
 
-         return result.catch(
-             status=Status.SUCCESS,
-             context={
-                 strategy_id: {
-                     "matrix": strategy,
-                     "stages": filter_func(context.pop("stages", {})),
-                 },
+     return result.catch(
+         status=Status.SUCCESS,
+         context={
+             strategy_id: {
+                 "matrix": strategy,
+                 "stages": filter_func(context.pop("stages", {})),
              },
-         )
-
-     def execute(
-         self,
-         params: DictData,
-         *,
-         run_id: str | None = None,
-         parent_run_id: str | None = None,
-         result: Result | None = None,
-     ) -> Result:
-         """Job execution with passing dynamic parameters from the workflow
-         execution. It will generate matrix values at the first step and run
-         multithread on this metrics to the `stages` field of this job.
+         },
+     )
 
-         :param params: An input parameters that use on job execution.
-         :param run_id: A job running ID for this execution.
-         :param parent_run_id: A parent workflow running ID for this release.
-         :param result: (Result) A result object for keeping context and status
-             data.
 
-         :rtype: Result
-         """
-         if result is None: # pragma: no cov
-             result: Result = Result(
-                 run_id=(run_id or gen_id(self.id or "", unique=True)),
-                 parent_run_id=parent_run_id,
+ def local_execute(
+     job: Job,
+     params: DictData,
+     *,
+     run_id: str | None = None,
+     parent_run_id: str | None = None,
+     result: Result | None = None,
+ ) -> Result:
+     """Local job execution with passing dynamic parameters from the workflow
+     execution. It will generate matrix values at the first step and run
+     multithread on this metrics to the `stages` field of this job.
+
+     :param job: A job model that want to execute.
+     :param params: An input parameters that use on job execution.
+     :param run_id: A job running ID for this execution.
+     :param parent_run_id: A parent workflow running ID for this release.
+     :param result: (Result) A result object for keeping context and status
+         data.
+
+     :rtype: Result
+     """
+     if result is None: # pragma: no cov
+         result: Result = Result(
+             run_id=(run_id or gen_id(job.id or "", unique=True)),
+             parent_run_id=parent_run_id,
+         )
+     elif parent_run_id: # pragma: no cov
+         result.set_parent_run_id(parent_run_id)
+
+     # NOTE: Normal Job execution without parallel strategy matrix. It uses
+     #   for-loop to control strategy execution sequentially.
+     if (not job.strategy.is_set()) or job.strategy.max_parallel == 1:
+
+         for strategy in job.strategy.make():
+             result: Result = local_execute_strategy(
+                 job=job,
+                 strategy=strategy,
+                 params=params,
+                 result=result,
              )
-         elif parent_run_id: # pragma: no cov
-             result.set_parent_run_id(parent_run_id)
 
-         # NOTE: Normal Job execution without parallel strategy matrix. It uses
-         #   for-loop to control strategy execution sequentially.
-         if (not self.strategy.is_set()) or self.strategy.max_parallel == 1:
+         return result.catch(status=Status.SUCCESS)
+
+     # NOTE: Create event for cancel executor by trigger stop running event.
+     event: Event = Event()
+
+     # IMPORTANT: Start running strategy execution by multithreading because
+     #   it will run by strategy values without waiting previous execution.
+     with ThreadPoolExecutor(
+         max_workers=job.strategy.max_parallel,
+         thread_name_prefix="job_strategy_exec_",
+     ) as executor:
+
+         futures: list[Future] = [
+             executor.submit(
+                 local_execute_strategy,
+                 job=job,
+                 strategy=strategy,
+                 params=params,
+                 result=result,
+                 event=event,
+             )
+             for strategy in job.strategy.make()
+         ]
+
+         context: DictData = {}
+         status: Status = Status.SUCCESS
+         fail_fast_flag: bool = job.strategy.fail_fast
+
+         if not fail_fast_flag:
+             done = as_completed(futures, timeout=1800)
+         else:
+             # NOTE: Get results from a collection of tasks with a timeout
+             #   that has the first exception.
+             done, not_done = wait(
+                 futures, timeout=1800, return_when=FIRST_EXCEPTION
+             )
+             nd: str = (
+                 f", the strategies do not run is {not_done}" if not_done else ""
+             )
+             result.trace.debug(f"[JOB]: Strategy is set Fail Fast{nd}")
 
-             for strategy in self.strategy.make():
-                 result: Result = self.execute_strategy(
-                     strategy=strategy,
-                     params=params,
-                     result=result,
-                 )
+             # NOTE: Stop all running tasks with setting the event manager
+             #   and cancel any scheduled tasks.
+             if len(done) != len(futures):
+                 event.set()
+                 for future in not_done:
+                     future.cancel()
 
-             return result.catch(status=Status.SUCCESS)
-
-         # NOTE: Create event for cancel executor by trigger stop running event.
-         event: Event = Event()
-
-         # IMPORTANT: Start running strategy execution by multithreading because
-         #   it will run by strategy values without waiting previous execution.
-         with ThreadPoolExecutor(
-             max_workers=self.strategy.max_parallel,
-             thread_name_prefix="job_strategy_exec_",
-         ) as executor:
-
-             futures: list[Future] = [
-                 executor.submit(
-                     self.execute_strategy,
-                     strategy=strategy,
-                     params=params,
-                     result=result,
-                     event=event,
-                 )
-                 for strategy in self.strategy.make()
-             ]
-
-             context: DictData = {}
-             status: Status = Status.SUCCESS
-             fail_fast_flag: bool = self.strategy.fail_fast
-
-             if fail_fast_flag:
-                 # NOTE: Get results from a collection of tasks with a timeout
-                 #   that has the first exception.
-                 done, not_done = wait(
-                     futures, timeout=1800, return_when=FIRST_EXCEPTION
-                 )
-                 nd: str = (
-                     f", the strategies do not run is {not_done}"
-                     if not_done
-                     else ""
+         for future in done:
+             try:
+                 future.result()
+             except JobException as err:
+                 status = Status.FAILED
+                 ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
+                 result.trace.error(
+                     f"[JOB]: {ls} Catch:\n\t{err.__class__.__name__}:"
+                     f"\n\t{err}"
                  )
-                 result.trace.debug(f"[JOB]: Strategy is set Fail Fast{nd}")
-
-                 # NOTE: Stop all running tasks with setting the event manager
-                 #   and cancel any scheduled tasks.
-                 if len(done) != len(futures):
-                     event.set()
-                     for future in not_done:
-                         future.cancel()
-             else:
-                 done = as_completed(futures, timeout=1800)
-
-             for future in done:
-                 try:
-                     future.result()
-                 except JobException as err:
-                     status = Status.FAILED
-                     ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
-                     result.trace.error(
-                         f"[JOB]: {ls} Catch:\n\t{err.__class__.__name__}:"
-                         f"\n\t{err}"
-                     )
-                     context.update(
-                         {
-                             "errors": {
-                                 "class": err,
-                                 "name": err.__class__.__name__,
-                                 "message": f"{err.__class__.__name__}: {err}",
-                             },
+                 context.update(
+                     {
+                         "errors": {
+                             "class": err,
+                             "name": err.__class__.__name__,
+                             "message": f"{err.__class__.__name__}: {err}",
                          },
-                     )
+                     },
+                 )
 
-         return result.catch(status=status, context=context)
+     return result.catch(status=status, context=context)
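
A small usage sketch of the refactor above: Job.execute now only resolves the runs-on type and forwards LOCAL jobs to the module-level local_execute, which is also importable directly (the echo-style stage payload below is an assumption about the package's stage models):

    from ddeutil.workflow.job import Job, local_execute

    job = Job(stages=[{"name": "Echo", "echo": "hello world"}])

    rs1 = job.execute(params={})             # dispatches to local_execute for LOCAL
    rs2 = local_execute(job=job, params={})  # equivalent direct call
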
ddeutil/workflow/scheduler.py CHANGED
@@ -83,6 +83,11 @@ class ScheduleWorkflow(BaseModel):
      the Schedule model. it should not use Workflow model directly because on the
      schedule config it can adjust crontab value that different from the Workflow
      model.
+
+     This on field does not equal to the on field of Workflow model, but it
+     uses same logic to generate running release date with crontab object. It use
+     for override the on field if the schedule time was change but you do not
+     want to change on the workflow model.
      """
 
      alias: Optional[str] = Field(
@@ -97,7 +102,7 @@ class ScheduleWorkflow(BaseModel):
      values: DictData = Field(
          default_factory=dict,
          description=(
-             "A value that want to pass to the workflow parameters when "
+             "A value that want to pass to the workflow params field when auto "
              "calling release method."
          ),
          alias="params",
@@ -222,8 +227,8 @@ class ScheduleWorkflow(BaseModel):
  class Schedule(BaseModel):
      """Schedule Pydantic model that use to run with any scheduler package.
 
-     It does not equal the on value in Workflow model, but it uses same logic
-     to running release date with crontab interval.
+     The workflows field of this model include ScheduleWorkflow objects that
+     enhance the workflow object by adding the alias and values fields.
      """
 
      desc: Optional[str] = Field(
@@ -477,7 +482,9 @@ def schedule_task(
          current_release: datetime = current_date.replace(
              second=0, microsecond=0
          )
-         if (first_date := q.first_queue.date) > current_release:
+         if (
+             first_date := q.first_queue.date
+         ) > current_release: # pragma: no cov
              result.trace.debug(
                  f"[WORKFLOW]: Skip schedule "
                  f"{first_date:%Y-%m-%d %H:%M:%S} for : {task.alias!r}"
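
A construction sketch for the clarified ScheduleWorkflow docstring above (the alias field and the params alias for values appear in this diff; the name and on keys are assumptions about the rest of the model):

    from ddeutil.workflow.scheduler import ScheduleWorkflow

    sw = ScheduleWorkflow.model_validate(
        {
            "name": "wf-daily",            # assumed: target workflow name
            "alias": "wf-daily-noon",      # alias field from this model
            "on": ["0 12 * * *"],          # assumed: overrides the workflow's own on
            "params": {"source": "demo"},  # lands in values via alias="params"
        }
    )
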
ddeutil/workflow/stages.py CHANGED
@@ -800,8 +800,12 @@ class HookStage(BaseStage): # pragma: no cov
 
  # TODO: Not implement this stages yet
  class DockerStage(BaseStage): # pragma: no cov
+     """Docker container stage execution."""
+
      image: str
      env: DictData = Field(default_factory=dict)
+     volume: DictData = Field(default_factory=dict)
+     auth: DictData = Field(default_factory=dict)
 
      def execute(
          self, params: DictData, *, result: Result | None = None
ddeutil/workflow/utils.py CHANGED
@@ -32,8 +32,10 @@ def get_dt_now(
  ) -> datetime: # pragma: no cov
      """Return the current datetime object.
 
-     :param tz:
-     :param offset:
+     :param tz: A ZoneInfo object for replace timezone of return datetime object.
+     :param offset: An offset second value.
+
+     :rtype: datetime
      :return: The current datetime object that use an input timezone or UTC.
      """
      return datetime.now(tz=(tz or UTC)) - timedelta(seconds=offset)
@@ -42,6 +44,14 @@ def get_dt_now(
  def get_d_now(
      tz: ZoneInfo | None = None, offset: float = 0.0
  ) -> date: # pragma: no cov
+     """Return the current date object.
+
+     :param tz: A ZoneInfo object for replace timezone of return date object.
+     :param offset: An offset second value.
+
+     :rtype: date
+     :return: The current date object that use an input timezone or UTC.
+     """
      return (datetime.now(tz=(tz or UTC)) - timedelta(seconds=offset)).date()
 
 
@@ -52,8 +62,10 @@ def get_diff_sec(
      current datetime with specific timezone.
 
      :param dt:
-     :param tz:
-     :param offset:
+     :param tz: A ZoneInfo object for replace timezone of return datetime object.
+     :param offset: An offset second value.
+
+     :rtype: int
      """
      return round(
          (
@@ -67,6 +79,10 @@ def reach_next_minute(
  ) -> bool:
      """Check this datetime object is not in range of minute level on the current
      datetime.
+
+     :param dt:
+     :param tz: A ZoneInfo object for replace timezone of return datetime object.
+     :param offset: An offset second value.
      """
      diff: float = (
          dt.replace(second=0, microsecond=0)
@@ -128,13 +144,14 @@ def gen_id(
      value: str = str(value)
 
      if config.gen_id_simple_mode:
-         return hash_str(f"{(value if sensitive else value.lower())}", n=10) + (
-             f"{datetime.now(tz=config.tz):%Y%m%d%H%M%S%f}" if unique else ""
-         )
+         return (
+             f"{datetime.now(tz=config.tz):%Y%m%d%H%M%S%f}T" if unique else ""
+         ) + hash_str(f"{(value if sensitive else value.lower())}", n=10)
+
      return md5(
          (
-             f"{(value if sensitive else value.lower())}"
-             + (f"{datetime.now(tz=config.tz):%Y%m%d%H%M%S%f}" if unique else "")
+             (f"{datetime.now(tz=config.tz):%Y%m%d%H%M%S%f}T" if unique else "")
+             + f"{(value if sensitive else value.lower())}"
          ).encode()
      ).hexdigest()
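
The reordering above changes the simple-mode ID layout: the %Y%m%d%H%M%S%f timestamp now leads, joined to the 10-character hash by a literal "T", so unique IDs sort chronologically as plain strings. An illustrative sketch (the digits are made up):

    from ddeutil.workflow.utils import gen_id

    rid = gen_id("demo", unique=True)
    # 0.0.36 layout: "<10-char-hash>20250316120000123456"
    # 0.0.37 layout: "20250316120000123456T<10-char-hash>"
    #                -> lexicographic order == chronological order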
 
@@ -179,9 +196,13 @@ def dash2underscore(
  ) -> DictData:
      """Change key name that has dash to underscore.
 
-     :param key
-     :param values
-     :param fixed
+     :param key:
+     :param values:
+     :param fixed:
+
+     Examples:
+         >>> dash2underscore('foo-bar', {"foo-bar": "demo"})
+         {'foo_bar': 'demo'}
 
      :rtype: DictData
      """
ddeutil_workflow-0.0.37.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ddeutil-workflow
- Version: 0.0.36
+ Version: 0.0.37
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -61,10 +61,10 @@ configuration. It called **Metadata Driven Data Workflow**.
 
  **:pushpin: <u>Rules of This Workflow engine</u>**:
 
- 1. The Minimum frequency unit of scheduling is **1 minute** :warning:
- 2. Can not re-run only failed stage and its pending downstream :rotating_light:
- 3. All parallel tasks inside workflow engine use Multi-Threading
-    (🐍 Python 3.13 unlock GIL :unlock:)
+ 1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+ 2. **Can not** re-run only failed stage and its pending downstream ↩️
+ 3. All parallel tasks inside workflow engine use **Multi-Threading**
+    (Python 3.13 unlock GIL 🐍🔓)
 
  ---
 
ddeutil_workflow-0.0.37.dist-info/RECORD ADDED
@@ -0,0 +1,32 @@
+ ddeutil/workflow/__about__.py,sha256=UJZ9dzvQ9h4mY_1tAZ0imJIZbhNW3TD-wWNEVs22HwA,28
+ ddeutil/workflow/__cron.py,sha256=3i-wmjTlh0ADCzN9pLKaWHzJkXzC72aIBmVEQSbyCCE,26895
+ ddeutil/workflow/__init__.py,sha256=d643WDkk93MjCt9ujD46hX07Mb7yc9eTzKV3QwnEZQg,1845
+ ddeutil/workflow/__types.py,sha256=CK1jfzyHP9P-MB0ElhpJZ59ZFGJC9MkQuAop5739_9k,4304
+ ddeutil/workflow/audit.py,sha256=wx70RKRdHj1d2431ilpt9OPTInMByjqXkYff7l5pvF4,8230
+ ddeutil/workflow/caller.py,sha256=pmZ9a5m1JlBTzR_xePOWZa98zyFE7jgJUlAXCx874Fs,5521
+ ddeutil/workflow/conf.py,sha256=MHzBeLZukFeIQ-YhxOz5uKCnGYqbhYdpwAEh9A9h_OM,12216
+ ddeutil/workflow/cron.py,sha256=j8EeoHst70toRfnD_frix41vrI-eLYVJkZ9yeJtpfnI,8871
+ ddeutil/workflow/exceptions.py,sha256=5ghT443VLq0IeU87loHNEqqrrrctklP7YfxwJ51ImWU,949
+ ddeutil/workflow/job.py,sha256=WnNkk_XhZytmLPzN6Kb41_BdfvBdtYhbn0SnDJ5ZgEw,25417
+ ddeutil/workflow/logs.py,sha256=EJDb9Xt3XWjTGE8CeEvw0eDU8kyaeStALQNAtTl5HQw,10027
+ ddeutil/workflow/params.py,sha256=qw9XJyjh2ocf9pf6h_XiYHLOvQN4R5TMqPElmItKnRM,8019
+ ddeutil/workflow/result.py,sha256=fbM2An3VyweMjAy4Iw7h8H-KkoQsDrZe_KjGztXAFkE,4319
+ ddeutil/workflow/scheduler.py,sha256=YMebYpNjqg6RWaE17sicwM3uthupeBGSGCnDGy4aKd8,26286
+ ddeutil/workflow/stages.py,sha256=SJD7T7BdIhxoPSaDf8s5I8U6sv7wJsG2BynG-_aZ004,28165
+ ddeutil/workflow/templates.py,sha256=A0JgZFGkBv-AX-EskZj656nG5zFd3j1PpLpyXihf6Xg,10967
+ ddeutil/workflow/utils.py,sha256=JppsS2c545hPqog0GWjpQnTVMnzjqnhx4K8GkMV_CP0,8132
+ ddeutil/workflow/workflow.py,sha256=_LYfs15AcXWVpM8CaO4oH6SWoJnqzF5FU08QTDoHT5w,44529
+ ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
+ ddeutil/workflow/api/api.py,sha256=gGQtqkzyJNaJIfka_w2M1lrCS3Ep46re2Dznsk9RxYQ,5191
+ ddeutil/workflow/api/log.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
+ ddeutil/workflow/api/repeat.py,sha256=cycd1-91j-4v6uY1SkrZHd9l95e-YgVC4UCSNNFuGJ8,5277
+ ddeutil/workflow/api/routes/__init__.py,sha256=qoGtOMyVgQ5nTUc8J8wH27A8isaxl3IFCX8qoyibeCY,484
+ ddeutil/workflow/api/routes/job.py,sha256=vCUTtsoCOtubVqjgk6MYUcGYim_l5Vh_NdtnQGx1SYM,1898
+ ddeutil/workflow/api/routes/logs.py,sha256=7xPKu814PGxaMsij8zB3MLeXBfTC7NKT7GlTOJ-PV2U,5173
+ ddeutil/workflow/api/routes/schedules.py,sha256=uWYDOwlV8w56hKQmfkQFwdZ6t2gZSJeCdBIzMmJenAQ,4824
+ ddeutil/workflow/api/routes/workflows.py,sha256=KVywA7vD9b4QrfmWBdSFF5chj34yJe1zNCzl6iBMeGI,4538
+ ddeutil_workflow-0.0.37.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.37.dist-info/METADATA,sha256=lazCqQyB5Cm-UWkYN9bGz3RnJpv_XhT0JOcmMf-7i5Y,19342
+ ddeutil_workflow-0.0.37.dist-info/WHEEL,sha256=beeZ86-EfXScwlR_HKu4SllMC9wUEj_8Z_4FJ3egI2w,91
+ ddeutil_workflow-0.0.37.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.37.dist-info/RECORD,,
ddeutil_workflow-0.0.37.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (76.0.0)
+ Generator: setuptools (76.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
ddeutil_workflow-0.0.36.dist-info/RECORD REMOVED
@@ -1,31 +0,0 @@
- ddeutil/workflow/__about__.py,sha256=sUrITqcQ8T6XPgdiHuk6l8YqDYHRQk2RjtNlY0SjLcs,28
- ddeutil/workflow/__cron.py,sha256=3i-wmjTlh0ADCzN9pLKaWHzJkXzC72aIBmVEQSbyCCE,26895
- ddeutil/workflow/__init__.py,sha256=d643WDkk93MjCt9ujD46hX07Mb7yc9eTzKV3QwnEZQg,1845
- ddeutil/workflow/__types.py,sha256=CK1jfzyHP9P-MB0ElhpJZ59ZFGJC9MkQuAop5739_9k,4304
- ddeutil/workflow/audit.py,sha256=wx70RKRdHj1d2431ilpt9OPTInMByjqXkYff7l5pvF4,8230
- ddeutil/workflow/caller.py,sha256=qNfrr0B2ykLm6Y8JfhU1rnf5XGmp-usXrPmXM5uGRnM,5473
- ddeutil/workflow/conf.py,sha256=cFc2cd_SGXg9PMrkvCT7WWE85a5UN-DdH53_JIbFyzs,14031
- ddeutil/workflow/cron.py,sha256=j8EeoHst70toRfnD_frix41vrI-eLYVJkZ9yeJtpfnI,8871
- ddeutil/workflow/exceptions.py,sha256=5ghT443VLq0IeU87loHNEqqrrrctklP7YfxwJ51ImWU,949
- ddeutil/workflow/job.py,sha256=lpuWQdAFlFUmP0H7ha217sDwbvFlqSuqyAo4Q1yhAa8,24527
- ddeutil/workflow/logs.py,sha256=EJDb9Xt3XWjTGE8CeEvw0eDU8kyaeStALQNAtTl5HQw,10027
- ddeutil/workflow/params.py,sha256=qw9XJyjh2ocf9pf6h_XiYHLOvQN4R5TMqPElmItKnRM,8019
- ddeutil/workflow/result.py,sha256=fbM2An3VyweMjAy4Iw7h8H-KkoQsDrZe_KjGztXAFkE,4319
- ddeutil/workflow/scheduler.py,sha256=6KozyZZSXtDqQxpUwS4o-J1h_HhGbwFV5Abul3_I4W4,25940
- ddeutil/workflow/stages.py,sha256=euT_v_xk0iqxWOFVZCHuFN8zREgc3-d06j5LMY8AJaE,28020
- ddeutil/workflow/templates.py,sha256=A0JgZFGkBv-AX-EskZj656nG5zFd3j1PpLpyXihf6Xg,10967
- ddeutil/workflow/utils.py,sha256=Djzd7Bz5QYAw0GqTFv6a0yWm_D8Md0XDxjWudmscQI0,7406
- ddeutil/workflow/workflow.py,sha256=_LYfs15AcXWVpM8CaO4oH6SWoJnqzF5FU08QTDoHT5w,44529
- ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
- ddeutil/workflow/api/api.py,sha256=fBFfJtpf7CL4Vym4iiZtWsrrnICGnwYKHBTwLoatPB4,5151
- ddeutil/workflow/api/repeat.py,sha256=g7VAP4O8ocKj5Uts5Q9P-OAfeDhKk7a4VWQDlOttTXA,5280
- ddeutil/workflow/api/routes/__init__.py,sha256=qoGtOMyVgQ5nTUc8J8wH27A8isaxl3IFCX8qoyibeCY,484
- ddeutil/workflow/api/routes/job.py,sha256=BXh8ikYKicCgzt13MysbtGgLrAPuts4dpgkD7iHTjrs,1901
- ddeutil/workflow/api/routes/logs.py,sha256=sHhQuigOQ6YQ34V56d9Is6eHbV-zE45v6gPntD7lgS8,1909
- ddeutil/workflow/api/routes/schedules.py,sha256=Oj8QLJJ852Lv2y40wGhuXjY2IEdhjAoGBxSWCkxe3YY,4675
- ddeutil/workflow/api/routes/workflows.py,sha256=LQiLlB3tESUtOZWcuLUKIHKvnit_yX3fKzYB4FqhavI,4354
- ddeutil_workflow-0.0.36.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
- ddeutil_workflow-0.0.36.dist-info/METADATA,sha256=OwDce5Gz0qVtJLQbmvbgthMfH6bD68Xaqm-B8RfLeRg,19354
- ddeutil_workflow-0.0.36.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
- ddeutil_workflow-0.0.36.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
- ddeutil_workflow-0.0.36.dist-info/RECORD,,