ddeutil-workflow 0.0.62__py3-none-any.whl → 0.0.64__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -55,15 +55,21 @@ from textwrap import dedent
55
55
  from threading import Event
56
56
  from typing import Annotated, Any, Optional, TypeVar, Union, get_type_hints
57
57
 
58
- from pydantic import BaseModel, Field
58
+ from pydantic import BaseModel, Field, ValidationError
59
59
  from pydantic.functional_validators import model_validator
60
60
  from typing_extensions import Self
61
61
 
62
62
  from .__types import DictData, DictStr, StrOrInt, StrOrNone, TupleStr
63
- from .conf import dynamic
63
+ from .conf import dynamic, pass_env
64
64
  from .exceptions import StageException, to_dict
65
65
  from .result import CANCEL, FAILED, SUCCESS, WAIT, Result, Status
66
- from .reusables import TagFunc, extract_call, not_in_template, param2template
66
+ from .reusables import (
67
+ TagFunc,
68
+ create_model_from_caller,
69
+ extract_call,
70
+ not_in_template,
71
+ param2template,
72
+ )
67
73
  from .utils import (
68
74
  delay,
69
75
  dump_all,
@@ -626,10 +632,10 @@ class BashStage(BaseAsyncStage):
626
632
  await f.write(f"#!/bin/{f_shebang}\n\n")
627
633
 
628
634
  # NOTE: add setting environment variable before bash skip statement.
629
- await f.writelines([f"{k}='{env[k]}';\n" for k in env])
635
+ await f.writelines(pass_env([f"{k}='{env[k]}';\n" for k in env]))
630
636
 
631
637
  # NOTE: make sure that shell script file does not have `\r` char.
632
- await f.write("\n" + bash.replace("\r\n", "\n"))
638
+ await f.write("\n" + pass_env(bash.replace("\r\n", "\n")))
633
639
 
634
640
  # NOTE: Make this .sh file able to executable.
635
641
  make_exec(f"./{f_name}")
@@ -662,10 +668,10 @@ class BashStage(BaseAsyncStage):
662
668
  f.write(f"#!/bin/{f_shebang}\n\n")
663
669
 
664
670
  # NOTE: add setting environment variable before bash skip statement.
665
- f.writelines([f"{k}='{env[k]}';\n" for k in env])
671
+ f.writelines(pass_env([f"{k}='{env[k]}';\n" for k in env]))
666
672
 
667
673
  # NOTE: make sure that shell script file does not have `\r` char.
668
- f.write("\n" + bash.replace("\r\n", "\n"))
674
+ f.write("\n" + pass_env(bash.replace("\r\n", "\n")))
669
675
 
670
676
  # NOTE: Make this .sh file able to executable.
671
677
  make_exec(f"./{f_name}")
@@ -895,7 +901,9 @@ class PyStage(BaseAsyncStage):
895
901
  # WARNING: The exec build-in function is very dangerous. So, it
896
902
  # should use the re module to validate exec-string before running.
897
903
  exec(
898
- param2template(dedent(self.run), params, extras=self.extras),
904
+ pass_env(
905
+ param2template(dedent(self.run), params, extras=self.extras)
906
+ ),
899
907
  gb,
900
908
  lc,
901
909
  )
@@ -1060,12 +1068,12 @@ class CallStage(BaseAsyncStage):
1060
1068
  args: DictData = {"result": result} | param2template(
1061
1069
  self.args, params, extras=self.extras
1062
1070
  )
1063
- ips = inspect.signature(call_func)
1071
+ sig = inspect.signature(call_func)
1064
1072
  necessary_params: list[str] = []
1065
1073
  has_keyword: bool = False
1066
- for k in ips.parameters:
1074
+ for k in sig.parameters:
1067
1075
  if (
1068
- v := ips.parameters[k]
1076
+ v := sig.parameters[k]
1069
1077
  ).default == Parameter.empty and v.kind not in (
1070
1078
  Parameter.VAR_KEYWORD,
1071
1079
  Parameter.VAR_POSITIONAL,
@@ -1083,11 +1091,10 @@ class CallStage(BaseAsyncStage):
1083
1091
  f"does not set to args, {list(args.keys())}."
1084
1092
  )
1085
1093
 
1086
- if "result" not in ips.parameters and not has_keyword:
1094
+ if "result" not in sig.parameters and not has_keyword:
1087
1095
  args.pop("result")
1088
1096
 
1089
- args = self.parse_model_args(call_func, args, result)
1090
-
1097
+ args = self.validate_model_args(call_func, args, result)
1091
1098
  if inspect.iscoroutinefunction(call_func):
1092
1099
  loop = asyncio.get_event_loop()
1093
1100
  rs: DictData = loop.run_until_complete(
@@ -1149,12 +1156,12 @@ class CallStage(BaseAsyncStage):
1149
1156
  args: DictData = {"result": result} | param2template(
1150
1157
  self.args, params, extras=self.extras
1151
1158
  )
1152
- ips = inspect.signature(call_func)
1159
+ sig = inspect.signature(call_func)
1153
1160
  necessary_params: list[str] = []
1154
1161
  has_keyword: bool = False
1155
- for k in ips.parameters:
1162
+ for k in sig.parameters:
1156
1163
  if (
1157
- v := ips.parameters[k]
1164
+ v := sig.parameters[k]
1158
1165
  ).default == Parameter.empty and v.kind not in (
1159
1166
  Parameter.VAR_KEYWORD,
1160
1167
  Parameter.VAR_POSITIONAL,
@@ -1172,10 +1179,10 @@ class CallStage(BaseAsyncStage):
1172
1179
  f"does not set to args, {list(args.keys())}."
1173
1180
  )
1174
1181
 
1175
- if "result" not in ips.parameters and not has_keyword:
1182
+ if "result" not in sig.parameters and not has_keyword:
1176
1183
  args.pop("result")
1177
1184
 
1178
- args = self.parse_model_args(call_func, args, result)
1185
+ args = self.validate_model_args(call_func, args, result)
1179
1186
  if inspect.iscoroutinefunction(call_func):
1180
1187
  rs: DictOrModel = await call_func(
1181
1188
  **param2template(args, params, extras=self.extras)
@@ -1198,13 +1205,12 @@ class CallStage(BaseAsyncStage):
1198
1205
  return result.catch(status=SUCCESS, context=dump_all(rs, by_alias=True))
1199
1206
 
1200
1207
  @staticmethod
1201
- def parse_model_args(
1208
+ def validate_model_args(
1202
1209
  func: TagFunc,
1203
1210
  args: DictData,
1204
1211
  result: Result,
1205
1212
  ) -> DictData:
1206
- """Parse Pydantic model from any dict data before parsing to target
1207
- caller function.
1213
+ """Validate an input arguments before passing to the caller function.
1208
1214
 
1209
1215
  :param func: A tag function that want to get typing.
1210
1216
  :param args: An arguments before passing to this tag function.
@@ -1214,7 +1220,25 @@ class CallStage(BaseAsyncStage):
1214
1220
  :rtype: DictData
1215
1221
  """
1216
1222
  try:
1223
+ model_instance = create_model_from_caller(func).model_validate(args)
1224
+ override = dict(model_instance)
1225
+ args.update(override)
1226
+
1217
1227
  type_hints: dict[str, Any] = get_type_hints(func)
1228
+
1229
+ for arg in type_hints:
1230
+
1231
+ if arg == "return":
1232
+ continue
1233
+
1234
+ if arg.removeprefix("_") in args:
1235
+ args[arg] = args.pop(arg.removeprefix("_"))
1236
+
1237
+ return args
1238
+ except ValidationError as e:
1239
+ raise StageException(
1240
+ "Validate argument from the caller function raise invalid type."
1241
+ ) from e
1218
1242
  except TypeError as e:
1219
1243
  result.trace.warning(
1220
1244
  f"[STAGE]: Get type hint raise TypeError: {e}, so, it skip "
@@ -1222,26 +1246,6 @@ class CallStage(BaseAsyncStage):
1222
1246
  )
1223
1247
  return args
1224
1248
 
1225
- for arg in type_hints:
1226
-
1227
- if arg == "return":
1228
- continue
1229
-
1230
- if arg.removeprefix("_") in args:
1231
- args[arg] = args.pop(arg.removeprefix("_"))
1232
-
1233
- t: Any = type_hints[arg]
1234
-
1235
- # NOTE: Check Result argument was passed to this caller function.
1236
- #
1237
- # if is_dataclass(t) and t.__name__ == "Result" and arg not in args:
1238
- # args[arg] = result
1239
-
1240
- if issubclass(t, BaseModel) and arg in args:
1241
- args[arg] = t.model_validate(obj=args[arg])
1242
-
1243
- return args
1244
-
1245
1249
 
1246
1250
  class TriggerStage(BaseStage):
1247
1251
  """Trigger workflow executor stage that run an input trigger Workflow
@@ -1295,7 +1299,7 @@ class TriggerStage(BaseStage):
1295
1299
  _trigger: str = param2template(self.trigger, params, extras=self.extras)
1296
1300
  result.trace.info(f"[STAGE]: Execute Trigger-Stage: {_trigger!r}")
1297
1301
  rs: Result = Workflow.from_conf(
1298
- name=_trigger,
1302
+ name=pass_env(_trigger),
1299
1303
  extras=self.extras | {"stage_raise_error": True},
1300
1304
  ).execute(
1301
1305
  params=param2template(self.params, params, extras=self.extras),
@@ -2417,9 +2421,11 @@ class DockerStage(BaseStage): # pragma: no cov
2417
2421
  )
2418
2422
 
2419
2423
  resp = client.api.pull(
2420
- repository=f"{self.image}",
2421
- tag=self.tag,
2422
- auth_config=param2template(self.auth, params, extras=self.extras),
2424
+ repository=pass_env(self.image),
2425
+ tag=pass_env(self.tag),
2426
+ auth_config=pass_env(
2427
+ param2template(self.auth, params, extras=self.extras)
2428
+ ),
2423
2429
  stream=True,
2424
2430
  decode=True,
2425
2431
  )
@@ -2438,10 +2444,10 @@ class DockerStage(BaseStage): # pragma: no cov
2438
2444
 
2439
2445
  unique_image_name: str = f"{self.image}_{datetime.now():%Y%m%d%H%M%S%f}"
2440
2446
  container = client.containers.run(
2441
- image=f"{self.image}:{self.tag}",
2447
+ image=pass_env(f"{self.image}:{self.tag}"),
2442
2448
  name=unique_image_name,
2443
- environment=self.env,
2444
- volumes=(
2449
+ environment=pass_env(self.env),
2450
+ volumes=pass_env(
2445
2451
  {
2446
2452
  Path.cwd()
2447
2453
  / f".docker.{result.run_id}.logs": {
@@ -2549,8 +2555,10 @@ class VirtualPyStage(PyStage): # pragma: no cov
2549
2555
  f_name: str = f"{run_id}.py"
2550
2556
  with open(f"./{f_name}", mode="w", newline="\n") as f:
2551
2557
  # NOTE: Create variable mapping that write before running statement.
2552
- vars_str: str = "\n ".join(
2553
- f"{var} = {value!r}" for var, value in values.items()
2558
+ vars_str: str = pass_env(
2559
+ "\n ".join(
2560
+ f"{var} = {value!r}" for var, value in values.items()
2561
+ )
2554
2562
  )
2555
2563
 
2556
2564
  # NOTE: `uv` supports PEP 723 — inline TOML metadata.
@@ -2568,7 +2576,7 @@ class VirtualPyStage(PyStage): # pragma: no cov
2568
2576
  )
2569
2577
 
2570
2578
  # NOTE: make sure that py script file does not have `\r` char.
2571
- f.write("\n" + py.replace("\r\n", "\n"))
2579
+ f.write("\n" + pass_env(py.replace("\r\n", "\n")))
2572
2580
 
2573
2581
  # NOTE: Make this .py file able to executable.
2574
2582
  make_exec(f"./{f_name}")
ddeutil/workflow/utils.py CHANGED
@@ -293,11 +293,13 @@ def cut_id(run_id: str, *, num: int = 6) -> str:
293
293
 
294
294
 
295
295
  @overload
296
- def dump_all(value: BaseModel, by_alias: bool = False) -> DictData: ...
296
+ def dump_all(
297
+ value: BaseModel, by_alias: bool = False
298
+ ) -> DictData: ... # pragma: no cov
297
299
 
298
300
 
299
301
  @overload
300
- def dump_all(value: T, by_alias: bool = False) -> T: ...
302
+ def dump_all(value: T, by_alias: bool = False) -> T: ... # pragma: no cov
301
303
 
302
304
 
303
305
  def dump_all(
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ddeutil-workflow
3
- Version: 0.0.62
3
+ Version: 0.0.64
4
4
  Summary: Lightweight workflow orchestration
5
5
  Author-email: ddeutils <korawich.anu@gmail.com>
6
6
  License: MIT
@@ -25,6 +25,7 @@ License-File: LICENSE
25
25
  Requires-Dist: ddeutil[checksum]>=0.4.8
26
26
  Requires-Dist: ddeutil-io[toml,yaml]>=0.2.13
27
27
  Requires-Dist: pydantic==2.11.4
28
+ Requires-Dist: pydantic-extra-types==2.10.4
28
29
  Requires-Dist: python-dotenv==1.1.0
29
30
  Requires-Dist: schedule<2.0.0,==1.2.2
30
31
  Provides-Extra: all
@@ -215,19 +216,23 @@ registry-caller/
215
216
  This function will store as module that will import from `WORKFLOW_CORE_REGISTRY_CALLER`
216
217
  value (This config can override by extra parameters with `registry_caller` key).
217
218
 
219
+ > [!NOTE]
220
 + > You can use a Pydantic model as an argument of your caller function. The core workflow
221
 + > engine will automatically call the `model_validate` method before running your caller function.
222
+
218
223
  ```python
219
- from ddeutil.workflow import Result, tag
224
+ from ddeutil.workflow import Result, CallerSecret, tag
220
225
  from ddeutil.workflow.exceptions import StageException
221
- from pydantic import BaseModel, SecretStr
226
+ from pydantic import BaseModel
222
227
 
223
228
  class AwsCredential(BaseModel):
224
229
  path: str
225
230
  access_client_id: str
226
- access_client_secret: SecretStr
231
+ access_client_secret: CallerSecret
227
232
 
228
233
  class RestAuth(BaseModel):
229
234
  type: str
230
- keys: SecretStr
235
+ keys: CallerSecret
231
236
 
232
237
  @tag("requests", alias="get-api-with-oauth-to-s3")
233
238
  def get_api_with_oauth_to_s3(
@@ -243,6 +248,7 @@ def get_api_with_oauth_to_s3(
243
248
  result.trace.info(f"... {method}: {url}")
244
249
  if method != "post":
245
250
  raise StageException(f"RestAPI does not support for {method} action.")
251
 + # NOTE: If you need the secret value, call `auth.keys.get_secret_value()`.
246
252
  return {"records": 1000}
247
253
  ```
248
254
 
@@ -259,45 +265,6 @@ result: Result = workflow.execute(
259
265
  )
260
266
  ```
261
267
 
262
- > [!NOTE]
263
- > So, this package provide the `Schedule` template for this action, and you can
264
- > pass the parameters dynamically for changing align with that running time by
265
- > the `release` prefix.
266
- >
267
- > ```yaml
268
- > schedule-run-local-wf:
269
- >
270
- > # Validate model that use to parsing exists for template file
271
- > type: Schedule
272
- > workflows:
273
- >
274
- > # Map existing workflow that want to deploy with scheduler application.
275
- > # It allows you to pass release parameter that dynamic change depend on the
276
- > # current context of this scheduler application releasing that time.
277
- > - name: run-py-local
278
- > params:
279
- > source-extract: "USD-THB"
280
- > run-date: "${{ release.logical_date }}"
281
- > ```
282
- >
283
- > The main method of the `Schedule` model that use to running is `pending`. If you
284
- > do not pass the `stop` date on this method, it will use config with
285
- > `WORKFLOW_APP_STOP_BOUNDARY_DELTA` key for generate this stop date.
286
- >
287
- > ```python
288
- > from ddeutil.workflow import Schedule
289
- >
290
- > (
291
- > Schedule
292
- > .from_conf("schedule-run-local-wf")
293
- > .pending(stop=None)
294
- > )
295
- > ```
296
-
297
- > [!WARNING]
298
- > The scheduler feature is the expensive feature of this project. You should
299
- > avoid to use it and find a scheduler tool instead.
300
-
301
268
  ## :cookie: Configuration
302
269
 
303
270
  The main configuration that use to dynamic changing this workflow engine for your
@@ -327,19 +294,6 @@ it will use default value and do not raise any error to you.
327
294
  | **TRACE_ENABLE_WRITE** | Log | `false` | |
328
295
  | **AUDIT_PATH** | Log | `./audits` | |
329
296
  | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
330
- | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
331
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
332
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
333
-
334
- **API Application**:
335
-
336
- This config part use for the workflow application that build from the FastAPI
337
- only.
338
-
339
- | Environment | Component | Default | Description |
340
- |:---------------------------|:-----------:|---------|------------------------------------------------------------------------------------|
341
- | **ENABLE_ROUTE_WORKFLOW** | API | `true` | A flag that enable workflow route to manage execute manually and workflow logging. |
342
- | **ENABLE_ROUTE_SCHEDULE** | API | `true` | A flag that enable run scheduler. |
343
297
 
344
298
  ## :rocket: Deployment
345
299
 
@@ -0,0 +1,28 @@
1
+ ddeutil/workflow/__about__.py,sha256=u0hgLvitHp32P7aLr3vGCiFbiGUkcbr4qFyevZfSorQ,28
2
+ ddeutil/workflow/__cron.py,sha256=BOKQcreiex0SAigrK1gnLxpvOeF3aca_rQwyz9Kfve4,28751
3
+ ddeutil/workflow/__init__.py,sha256=g-NTBGwIJaXmvuWoIRuUE65nqsIuXEM2CGWlLSszVmM,905
4
+ ddeutil/workflow/__main__.py,sha256=x-sYedl4T8p6054aySk-EQX6vhytvPR0HvaBNYxMzp0,364
5
+ ddeutil/workflow/__types.py,sha256=uNfoRbVmNK5O37UUMVnqcmoghD9oMS1q9fXC0APnjSI,4584
6
+ ddeutil/workflow/conf.py,sha256=trcsW2_jVeSFT_f58H_s99n2h8oRLIurRIF1ZrEjPYg,14927
7
+ ddeutil/workflow/event.py,sha256=S2eJAZZx_V5TuQ0l417hFVCtjWXnfNPZBgSCICzxQ48,11041
8
+ ddeutil/workflow/exceptions.py,sha256=_USvPsqPDkjmZycfjahpWHFhE-5fA6KRoc6ii0jRsVA,2401
9
+ ddeutil/workflow/job.py,sha256=Php1b3n6c-jddel8PTSa61kAW22QBTetzoLVR4XXM4E,35240
10
+ ddeutil/workflow/logs.py,sha256=iVtyl8i69y7t07tAuWkihc54WlkHCcBy_Ur0WtzJ_lM,31367
11
+ ddeutil/workflow/params.py,sha256=1u8gXs1ZyMq-2eD9H8L7Yjfu5t7b_OzjA0fJvhxdYWY,12505
12
+ ddeutil/workflow/result.py,sha256=4M9VCcveI8Yz6ZrnI-67SZlry-Z8G7e0hziy1k-pklk,5906
13
+ ddeutil/workflow/reusables.py,sha256=PP1K264a4YeFpEFsXVLicAFSjHdDXnPVAGmASXJpVmc,21706
14
+ ddeutil/workflow/stages.py,sha256=kS4g7N7kg31kEoveI6cZ9bzk_V0hm87lzyZpGU4-fss,93525
15
+ ddeutil/workflow/utils.py,sha256=1i82dWXCnbpEXhYtztNz6IPy7uOyNDY2BZcXJCAfzno,9611
16
+ ddeutil/workflow/workflow.py,sha256=8Z_h8OtNHkaGf8MJixTHNeXsyA4mBlYtHDqj0oEVFBs,44858
17
+ ddeutil/workflow/api/__init__.py,sha256=0UIilYwW29RL6HrCRHACSWvnATJVLSJzXiCMny0bHQk,2627
18
+ ddeutil/workflow/api/logs.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
19
+ ddeutil/workflow/api/routes/__init__.py,sha256=jC1pM7q4_eo45IyO3hQbbe6RnL9B8ibRq_K6aCMP6Ag,434
20
+ ddeutil/workflow/api/routes/job.py,sha256=PWDbIvvoPKbG7JQHwM80e6YIB7nnsGeZaqNSOTQE99k,2155
21
+ ddeutil/workflow/api/routes/logs.py,sha256=QJH8IF102897WLfCJ29-1g15wl29M9Yq6omroZfbahs,5305
22
+ ddeutil/workflow/api/routes/workflows.py,sha256=Gmg3e-K5rfi95pbRtWI_aIr5C089sIde_vefZVvh3U0,4420
23
+ ddeutil_workflow-0.0.64.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
24
+ ddeutil_workflow-0.0.64.dist-info/METADATA,sha256=ytJpKiesulHXeHt1JaSuaeOsp8o6D7b37r8bMuwnXCg,16957
25
+ ddeutil_workflow-0.0.64.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
26
+ ddeutil_workflow-0.0.64.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
27
+ ddeutil_workflow-0.0.64.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
28
+ ddeutil_workflow-0.0.64.dist-info/RECORD,,
@@ -1,141 +0,0 @@
1
- # ------------------------------------------------------------------------------
2
- # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
3
- # Licensed under the MIT License. See LICENSE in the project root for
4
- # license information.
5
- # ------------------------------------------------------------------------------
6
- from __future__ import annotations
7
-
8
- import copy
9
- from datetime import datetime, timedelta
10
-
11
- from fastapi import APIRouter, HTTPException, Request
12
- from fastapi import status as st
13
- from fastapi.responses import UJSONResponse
14
-
15
- from ...conf import config
16
- from ...logs import get_logger
17
- from ...scheduler import Schedule
18
-
19
- logger = get_logger("uvicorn.error")
20
- schedule_route = APIRouter(
21
- prefix="/schedules",
22
- tags=["schedules"],
23
- default_response_class=UJSONResponse,
24
- )
25
-
26
-
27
- @schedule_route.get(path="/{name}", status_code=st.HTTP_200_OK)
28
- async def get_schedules(name: str):
29
- """Get schedule object."""
30
- try:
31
- schedule: Schedule = Schedule.from_conf(name=name, extras={})
32
- except ValueError:
33
- raise HTTPException(
34
- status_code=st.HTTP_404_NOT_FOUND,
35
- detail=f"Schedule name: {name!r} does not found in /conf path",
36
- ) from None
37
- return schedule.model_dump(
38
- by_alias=True,
39
- exclude_none=True,
40
- exclude_unset=True,
41
- exclude_defaults=True,
42
- )
43
-
44
-
45
- @schedule_route.get(path="/deploy/", status_code=st.HTTP_200_OK)
46
- async def get_deploy_schedulers(request: Request):
47
- snapshot = copy.deepcopy(request.state.scheduler)
48
- return {"schedule": snapshot}
49
-
50
-
51
- @schedule_route.get(path="/deploy/{name}", status_code=st.HTTP_200_OK)
52
- async def get_deploy_scheduler(request: Request, name: str):
53
- if name in request.state.scheduler:
54
- schedule = Schedule.from_conf(name)
55
- getter: list[dict[str, dict[str, list[datetime]]]] = []
56
- for workflow in schedule.workflows:
57
- getter.append(
58
- {
59
- workflow.name: {
60
- "queue": copy.deepcopy(
61
- request.state.workflow_queue[workflow.name]
62
- ),
63
- "running": copy.deepcopy(
64
- request.state.workflow_running[workflow.name]
65
- ),
66
- }
67
- }
68
- )
69
- return {
70
- "message": f"Getting {name!r} to schedule listener.",
71
- "scheduler": getter,
72
- }
73
- raise HTTPException(
74
- status_code=st.HTTP_404_NOT_FOUND,
75
- detail=f"Does not found {name!r} in schedule listener",
76
- )
77
-
78
-
79
- @schedule_route.post(path="/deploy/{name}", status_code=st.HTTP_202_ACCEPTED)
80
- async def add_deploy_scheduler(request: Request, name: str):
81
- """Adding schedule name to application state store."""
82
- if name in request.state.scheduler:
83
- raise HTTPException(
84
- status_code=st.HTTP_302_FOUND,
85
- detail=f"This schedule {name!r} already exists in scheduler list.",
86
- )
87
-
88
- request.state.scheduler.append(name)
89
-
90
- start_date: datetime = datetime.now(tz=config.tz)
91
- start_date_waiting: datetime = (start_date + timedelta(minutes=1)).replace(
92
- second=0, microsecond=0
93
- )
94
-
95
- # NOTE: Create a pair of workflow and on from schedule model.
96
- try:
97
- schedule: Schedule = Schedule.from_conf(name)
98
- except ValueError as err:
99
- request.state.scheduler.remove(name)
100
- logger.exception(err)
101
- raise HTTPException(
102
- status_code=st.HTTP_404_NOT_FOUND,
103
- detail=str(err),
104
- ) from None
105
-
106
- request.state.workflow_tasks.extend(
107
- schedule.tasks(
108
- start_date_waiting,
109
- queue=request.state.workflow_queue,
110
- ),
111
- )
112
- return {
113
- "message": f"Adding {name!r} to schedule listener.",
114
- "start_date": start_date_waiting,
115
- }
116
-
117
-
118
- @schedule_route.delete(path="/deploy/{name}", status_code=st.HTTP_202_ACCEPTED)
119
- async def del_deploy_scheduler(request: Request, name: str):
120
- """Delete workflow task on the schedule listener."""
121
- if name in request.state.scheduler:
122
-
123
- # NOTE: Remove current schedule name from the state.
124
- request.state.scheduler.remove(name)
125
-
126
- schedule: Schedule = Schedule.from_conf(name)
127
-
128
- for task in schedule.tasks(datetime.now(tz=config.tz), queue={}):
129
- if task in request.state.workflow_tasks:
130
- request.state.workflow_tasks.remove(task)
131
-
132
- for workflow in schedule.workflows:
133
- if workflow.alias in request.state.workflow_queue:
134
- request.state.workflow_queue.pop(workflow.alias)
135
-
136
- return {"message": f"Deleted schedule {name!r} in listener."}
137
-
138
- raise HTTPException(
139
- status_code=st.HTTP_404_NOT_FOUND,
140
- detail=f"Does not found schedule {name!r} in listener",
141
- )