ddeutil-workflow 0.0.43.tar.gz → 0.0.45.tar.gz

Files changed (67)
  1. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/PKG-INFO +6 -7
  2. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/README.md +5 -5
  3. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/pyproject.toml +0 -1
  4. ddeutil_workflow-0.0.45/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/conf.py +33 -27
  6. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/job.py +65 -50
  7. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/logs.py +44 -5
  8. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/result.py +10 -1
  9. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/reusables.py +1 -0
  10. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/stages.py +9 -5
  11. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/workflow.py +75 -94
  12. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil_workflow.egg-info/PKG-INFO +6 -7
  13. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil_workflow.egg-info/requires.txt +0 -1
  14. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_job_exec_strategy.py +8 -3
  15. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_stage.py +0 -10
  16. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_workflow.py +51 -0
  17. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_workflow_exec.py +138 -2
  18. ddeutil_workflow-0.0.43/src/ddeutil/workflow/__about__.py +0 -1
  19. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/LICENSE +0 -0
  20. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/setup.cfg +0 -0
  21. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/__cron.py +0 -0
  22. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/__init__.py +0 -0
  23. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/__types.py +0 -0
  24. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/api/__init__.py +0 -0
  25. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/api/api.py +0 -0
  26. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/api/log.py +0 -0
  27. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/api/repeat.py +0 -0
  28. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
  29. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/api/routes/job.py +0 -0
  30. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/api/routes/logs.py +0 -0
  31. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/api/routes/schedules.py +0 -0
  32. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/api/routes/workflows.py +0 -0
  33. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/cron.py +0 -0
  34. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/exceptions.py +0 -0
  35. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/params.py +0 -0
  36. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/scheduler.py +0 -0
  37. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil/workflow/utils.py +0 -0
  38. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil_workflow.egg-info/SOURCES.txt +0 -0
  39. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  40. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  41. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test__cron.py +0 -0
  42. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test__regex.py +0 -0
  43. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_conf.py +0 -0
  44. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_cron_on.py +0 -0
  45. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_job.py +0 -0
  46. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_job_exec.py +0 -0
  47. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_job_strategy.py +0 -0
  48. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_logs_audit.py +0 -0
  49. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_logs_trace.py +0 -0
  50. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_params.py +0 -0
  51. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_release.py +0 -0
  52. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_release_queue.py +0 -0
  53. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_result.py +0 -0
  54. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_reusables_call_tag.py +0 -0
  55. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_reusables_template.py +0 -0
  56. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_reusables_template_filter.py +0 -0
  57. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_schedule.py +0 -0
  58. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_schedule_pending.py +0 -0
  59. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_schedule_tasks.py +0 -0
  60. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_schedule_workflow.py +0 -0
  61. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_scheduler_control.py +0 -0
  62. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_stage_handler_exec.py +0 -0
  63. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_utils.py +0 -0
  64. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_workflow_exec_job.py +0 -0
  65. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_workflow_exec_poke.py +0 -0
  66. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_workflow_exec_release.py +0 -0
  67. {ddeutil_workflow-0.0.43 → ddeutil_workflow-0.0.45}/tests/test_workflow_task.py +0 -0
--- ddeutil_workflow-0.0.43/PKG-INFO
+++ ddeutil_workflow-0.0.45/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.43
+Version: 0.0.45
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -30,7 +30,6 @@ Requires-Dist: schedule<2.0.0,==1.2.2
 Provides-Extra: all
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "all"
 Requires-Dist: httpx; extra == "all"
-Requires-Dist: ujson; extra == "all"
 Requires-Dist: aiofiles; extra == "all"
 Requires-Dist: aiohttp; extra == "all"
 Provides-Extra: api
@@ -71,9 +70,9 @@ configuration. It called **Metadata Driven Data Workflow**.

 **:pushpin: <u>Rules of This Workflow engine</u>**:

-1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+1. The Minimum frequency unit of built-in scheduling is **1 Minute** 🕘
 2. **Can not** re-run only failed stage and its pending downstream ↩️
-3. All parallel tasks inside workflow engine use **Multi-Threading**
+3. All parallel tasks inside workflow core engine use **Multi-Threading** pool
    (Python 3.13 unlock GIL 🐍🔓)

 ---
@@ -266,11 +265,11 @@ it will use default value and do not raise any error to you.
 | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
 | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
 | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
-| **CONF_PATH** | Core | `conf` | No | The config path that keep all template `.yaml` files. |
+| **CONF_PATH** | Core | `conf` | Yes | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
-| **STAGE_DEFAULT_ID** | Core | `true` | No | A flag that enable default stage ID that use for catch an execution output. |
+| **STAGE_DEFAULT_ID** | Core | `true` | Yes | A flag that enable default stage ID that use for catch an execution output. |
 | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
-| **JOB_DEFAULT_ID** | Core | `false` | No | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+| **JOB_DEFAULT_ID** | Core | `false` | Yes | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
 | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
 | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
 | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
--- ddeutil_workflow-0.0.43/README.md
+++ ddeutil_workflow-0.0.45/README.md
@@ -27,9 +27,9 @@ configuration. It called **Metadata Driven Data Workflow**.

 **:pushpin: <u>Rules of This Workflow engine</u>**:

-1. The Minimum frequency unit of scheduling is **1 Minute** 🕘
+1. The Minimum frequency unit of built-in scheduling is **1 Minute** 🕘
 2. **Can not** re-run only failed stage and its pending downstream ↩️
-3. All parallel tasks inside workflow engine use **Multi-Threading**
+3. All parallel tasks inside workflow core engine use **Multi-Threading** pool
    (Python 3.13 unlock GIL 🐍🔓)

 ---
@@ -222,11 +222,11 @@ it will use default value and do not raise any error to you.
 | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
 | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
 | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
-| **CONF_PATH** | Core | `conf` | No | The config path that keep all template `.yaml` files. |
+| **CONF_PATH** | Core | `conf` | Yes | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
-| **STAGE_DEFAULT_ID** | Core | `true` | No | A flag that enable default stage ID that use for catch an execution output. |
+| **STAGE_DEFAULT_ID** | Core | `true` | Yes | A flag that enable default stage ID that use for catch an execution output. |
 | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
-| **JOB_DEFAULT_ID** | Core | `false` | No | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
+| **JOB_DEFAULT_ID** | Core | `false` | Yes | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
 | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
 | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
 | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
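Note on the `No` → `Yes` flips above (CONF_PATH, STAGE_DEFAULT_ID, JOB_DEFAULT_ID): these track the `dynamic()` helper that this release adds to `conf.py`, which lets such Core settings be supplied per call through an `extras` mapping instead of only through environment variables. A minimal sketch, assuming the `WORKFLOW_CORE_` environment prefix this project uses for Core settings:

```python
import os

# Environment values are read when the Config object is built, so export
# them before importing the package (prefix assumed: WORKFLOW_CORE_).
os.environ["WORKFLOW_CORE_CONF_PATH"] = "conf"

from ddeutil.workflow.conf import config, dynamic  # noqa: E402

# Static value resolved from the environment-backed Config object.
print(config.conf_path)

# Run-time override via extras, which the new `dynamic()` helper allows.
print(dynamic("conf_path", extras={"conf_path": config.conf_path / "dev"}))
```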
--- ddeutil_workflow-0.0.43/pyproject.toml
+++ ddeutil_workflow-0.0.45/pyproject.toml
@@ -38,7 +38,6 @@ dynamic = ["version"]
 all = [
     "fastapi>=0.115.0,<1.0.0",
     "httpx",
-    "ujson",
     "aiofiles",
     "aiohttp",
 ]
--- /dev/null
+++ ddeutil_workflow-0.0.45/src/ddeutil/workflow/__about__.py
@@ -0,0 +1 @@
+__version__: str = "0.0.45"
--- ddeutil_workflow-0.0.43/src/ddeutil/workflow/conf.py
+++ ddeutil_workflow-0.0.45/src/ddeutil/workflow/conf.py
@@ -341,6 +341,32 @@ class SimLoad:
         )


+config: Config = Config()
+api_config: APIConfig = APIConfig()
+
+
+def dynamic(
+    key: Optional[str] = None,
+    *,
+    f: Optional[T] = None,
+    extras: Optional[DictData] = None,
+) -> Optional[T]:
+    """Dynamic get config if extra value was passed at run-time.
+
+    :param key: (str) A config key that get from Config object.
+    :param f: An inner config function scope.
+    :param extras: An extra values that pass at run-time.
+    """
+    rsx: Optional[T] = extras[key] if extras and key in extras else None
+    rs: Optional[T] = f or getattr(config, key, None)
+    if rsx is not None and not isinstance(rsx, type(rs)):
+        raise TypeError(
+            f"Type of config {key!r} from extras: {rsx!r} does not valid "
+            f"as config {type(rs)}."
+        )
+    return rsx or rs
+
+
 class Loader(SimLoad):
     """Loader Object that get the config `yaml` file from current path.

@@ -355,6 +381,7 @@ class Loader(SimLoad):
         *,
         included: list[str] | None = None,
         excluded: list[str] | None = None,
+        path: Path | None = None,
         **kwargs,
     ) -> Iterator[tuple[str, DictData]]:
         """Override the find class method from the Simple Loader object.
@@ -362,44 +389,23 @@ class Loader(SimLoad):
         :param obj: An object that want to validate matching before return.
         :param included:
         :param excluded:
+        :param path:

         :rtype: Iterator[tuple[str, DictData]]
         """
         return super().finds(
             obj=obj,
-            conf_path=config.conf_path,
+            conf_path=(path or config.conf_path),
             included=included,
             excluded=excluded,
         )

     def __init__(self, name: str, externals: DictData) -> None:
-        super().__init__(name, conf_path=config.conf_path, externals=externals)
-
-
-config: Config = Config()
-api_config: APIConfig = APIConfig()
-
-
-def dynamic(
-    key: Optional[str] = None,
-    *,
-    f: Optional[T] = None,
-    extras: Optional[DictData] = None,
-) -> Optional[T]:
-    """Dynamic get config if extra value was passed at run-time.
-
-    :param key: (str) A config key that get from Config object.
-    :param f: An inner config function scope.
-    :param extras: An extra values that pass at run-time.
-    """
-    rsx: Optional[T] = extras[key] if extras and key in extras else None
-    rs: Optional[T] = f or getattr(config, key, None)
-    if rsx is not None and not isinstance(rsx, type(rs)):
-        raise TypeError(
-            f"Type of config {key!r} from extras: {rsx!r} does not valid "
-            f"as config {type(rs)}."
+        super().__init__(
+            name,
+            conf_path=dynamic("conf_path", extras=externals),
+            externals=externals,
         )
-    return rsx or rs


 @lru_cache
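The relocation above is load-bearing: `config`, `api_config`, and `dynamic()` now sit before `class Loader`, so `Loader.__init__` can resolve `conf_path` via `dynamic("conf_path", extras=externals)`. A usage sketch of the helper exactly as defined in this hunk (assuming `Config.conf_path` is a `Path`, as the `path: Path | None` parameter on `Loader.finds` suggests):

```python
from pathlib import Path

from ddeutil.workflow.conf import config, dynamic

# No extras: fall back to the attribute on the module-level Config object.
assert dynamic("conf_path") == config.conf_path

# An extras value of the matching type wins over the static config value.
assert dynamic("conf_path", extras={"conf_path": Path("custom")}) == Path("custom")

# A mismatched type raises TypeError, e.g. a str where Config holds a Path.
try:
    dynamic("conf_path", extras={"conf_path": "custom"})
except TypeError as err:
    print(err)
```

One subtlety worth knowing: the helper returns `rsx or rs`, so a falsy override (for example `False` for a boolean flag) silently falls back to the static config value.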
--- ddeutil_workflow-0.0.43/src/ddeutil/workflow/job.py
+++ ddeutil_workflow-0.0.45/src/ddeutil/workflow/job.py
@@ -38,6 +38,7 @@ from .exceptions import (
     JobException,
     StageException,
     UtilException,
+    to_dict,
 )
 from .result import FAILED, SKIP, SUCCESS, WAIT, Result, Status
 from .reusables import has_template, param2template
@@ -415,6 +416,7 @@ class Job(BaseModel):
         need_exist: dict[str, Any] = {
             need: jobs[need] for need in self.needs if need in jobs
         }
+
         if len(need_exist) != len(self.needs):
             return WAIT
         elif all("skipped" in need_exist[job] for job in need_exist):
@@ -630,19 +632,6 @@ def local_execute_strategy(
     result: Result = Result(run_id=gen_id(job.id or "not-set", unique=True))

     strategy_id: str = gen_id(strategy)
-
-    # PARAGRAPH:
-    #
-    #     Create strategy execution context and update a matrix and copied
-    #     of params. So, the context value will have structure like;
-    #
-    #     {
-    #         "params": { ... },   <== Current input params
-    #         "jobs": { ... },     <== Current input params
-    #         "matrix": { ... }    <== Current strategy value
-    #         "stages": { ... }    <== Catching stage outputs
-    #     }
-    #
     context: DictData = copy.deepcopy(params)
     context.update({"matrix": strategy, "stages": {}})

@@ -650,7 +639,6 @@ def local_execute_strategy(
     result.trace.info(f"[JOB]: Execute Strategy ID: {strategy_id}")
     result.trace.info(f"[JOB]: ... Matrix: {strategy_id}")

-    # IMPORTANT: The stage execution only run sequentially one-by-one.
     for stage in job.stages:

         if stage.is_skipped(params=context):
@@ -674,34 +662,30 @@ def local_execute_strategy(
                 },
             )

-        # PARAGRAPH:
-        #
-        #     This step will add the stage result to `stages` key in that
-        #     stage id. It will have structure like;
-        #
-        #     {
-        #         "params": { ... },
-        #         "jobs": { ... },
-        #         "matrix": { ... },
-        #         "stages": { { "stage-id-01": { "outputs": { ... } } }, ... }
-        #     }
-        #
-        # IMPORTANT:
-        #     This execution change all stage running IDs to the current job
-        #     running ID, but it still trac log to the same parent running ID
-        #     (with passing `run_id` and `parent_run_id` to the stage
-        #     execution arguments).
-        #
         try:
-            stage.set_outputs(
-                stage.handler_execute(
-                    params=context,
-                    run_id=result.run_id,
-                    parent_run_id=result.parent_run_id,
-                    event=event,
-                ).context,
-                to=context,
+            rs: Result = stage.handler_execute(
+                params=context,
+                run_id=result.run_id,
+                parent_run_id=result.parent_run_id,
+                event=event,
             )
+            stage.set_outputs(rs.context, to=context)
+            if rs.status == FAILED:
+                error_msg: str = (
+                    f"Job strategy was break because it has a stage, "
+                    f"{stage.iden}, failed without raise error."
+                )
+                return result.catch(
+                    status=FAILED,
+                    context={
+                        strategy_id: {
+                            "matrix": strategy,
+                            "stages": context.pop("stages", {}),
+                            "errors": JobException(error_msg).to_dict(),
+                        },
+                    },
+                )
+
         except (StageException, UtilException) as err:
             result.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
             do_raise: bool = dynamic(
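The behavioral change in this hunk: a stage that reports a FAILED `Result` without raising now stops the strategy loop, and the failure is recorded as a `JobException` payload under the strategy's context key. A hedged sketch of how a caller observes that; the `Job` mapping shape and the positional `(job, strategy, params)` order are assumptions based on this diff and the README's `echo`-stage examples:

```python
from ddeutil.workflow.job import Job, local_execute_strategy
from ddeutil.workflow.result import FAILED

job = Job.model_validate({"stages": [{"name": "Echo", "echo": "hello"}]})
rs = local_execute_strategy(job, {}, {"params": {}})

if rs.status == FAILED:
    # Context is keyed by the generated strategy ID; each entry carries the
    # matrix, the captured stage outputs, and the JobException details.
    for strategy_id, ctx in rs.context.items():
        print(strategy_id, ctx.get("errors"))
```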
@@ -746,8 +730,8 @@ def local_execute(
     raise_error: bool | None = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
-    execution. It will generate matrix values at the first step and run
-    multithread on this metrics to the `stages` field of this job.
+    execution or itself execution. It will generate matrix values at the first
+    step and run multithread on this metrics to the `stages` field of this job.

     This method does not raise any JobException if it runs with
     multi-threading strategy.
@@ -798,7 +782,7 @@ def local_execute(
             raise_error=raise_error,
         )

-        return result.catch(status=SUCCESS)
+        return result.catch(status=result.status)

     fail_fast_flag: bool = job.strategy.fail_fast
     ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
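The `result.catch(status=result.status)` change matters for single-strategy jobs: this early-return path previously reported SUCCESS unconditionally, even when the inner strategy had failed. A sketch, reusing the hypothetical `job` from the previous example:

```python
from ddeutil.workflow.job import local_execute
from ddeutil.workflow.result import SUCCESS

rs = local_execute(job, {"params": {}})
# The returned status now reflects the strategy run instead of a forced
# SUCCESS, so branching on it is meaningful.
print(rs.status == SUCCESS)
```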
@@ -818,8 +802,6 @@ def local_execute(
         },
     )

-    # IMPORTANT: Start running strategy execution by multithreading because
-    # it will run by strategy values without waiting previous execution.
     with ThreadPoolExecutor(
         max_workers=job.strategy.max_parallel,
         thread_name_prefix="job_strategy_exec_",
@@ -885,6 +867,22 @@ def self_hosted_execute(
     event: Event | None = None,
     raise_error: bool | None = None,
 ) -> Result:  # pragma: no cov
+    """Self-Hosted job execution with passing dynamic parameters from the
+    workflow execution or itself execution. It will make request to the
+    self-hosted host url.
+
+    :param job: (Job) A job model that want to execute.
+    :param params: (DictData) An input parameters that use on job execution.
+    :param run_id: (str) A job running ID for this execution.
+    :param parent_run_id: (str) A parent workflow running ID for this release.
+    :param result: (Result) A result object for keeping context and status
+        data.
+    :param event: (Event) An event manager that pass to the PoolThreadExecutor.
+    :param raise_error: (bool) A flag that all this method raise error to the
+        strategy execution.
+
+    :rtype: Result
+    """
     result: Result = Result.construct_with_rs_or_id(
         result,
         run_id=run_id,
@@ -893,14 +891,31 @@ def self_hosted_execute(
     )

     if event and event.is_set():
-        return result.catch(status=FAILED)
+        return result.catch(
+            status=FAILED,
+            context={
+                "errors": JobException(
+                    "Job self-hosted execution was canceled from event that "
+                    "had set before start execution."
+                ).to_dict()
+            },
+        )

     import requests

-    resp = requests.post(
-        job.runs_on.args.host,
-        data={"job": job.model_dump(), "params": params},
-    )
+    try:
+        resp = requests.post(
+            job.runs_on.args.host,
+            headers={"Auth": f"Barer {job.runs_on.args.token}"},
+            data={
+                "job": job.model_dump(),
+                "params": params,
+                "result": result.__dict__,
+                "raise_error": raise_error,
+            },
+        )
+    except requests.exceptions.RequestException as e:
+        return result.catch(status=FAILED, context={"errors": to_dict(e)})

     if resp.status_code != 200:
         do_raise: bool = dynamic(
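Two hardening changes land here: an event set before start now returns a structured error context, and a network failure is trapped rather than propagating out of `self_hosted_execute`. A hedged sketch of the new failure path; `job` is assumed to be configured with a self-hosted `runs_on` whose host is unreachable:

```python
from ddeutil.workflow.job import self_hosted_execute
from ddeutil.workflow.result import FAILED

rs = self_hosted_execute(job, {"params": {}})
if rs.status == FAILED:
    # A requests RequestException is serialized by to_dict() into the
    # context instead of raising out of the call.
    print(rs.context["errors"])
```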
--- ddeutil_workflow-0.0.43/src/ddeutil/workflow/logs.py
+++ ddeutil_workflow-0.0.45/src/ddeutil/workflow/logs.py
@@ -80,6 +80,8 @@ class TraceMeda(BaseModel):  # pragma: no cov


 class TraceData(BaseModel):  # pragma: no cov
+    """Trace Data model for keeping data for any Trace models."""
+
     stdout: str = Field(description="A standard output trace data.")
     stderr: str = Field(description="A standard error trace data.")
     meta: list[TraceMeda] = Field(
@@ -92,6 +94,12 @@ class TraceData(BaseModel):  # pragma: no cov

     @classmethod
     def from_path(cls, file: Path) -> Self:
+        """Construct this trace data model with a trace path.
+
+        :param file: (Path) A trace path.
+
+        :rtype: Self
+        """
         data: DictStr = {"stdout": "", "stderr": "", "meta": []}

         if (file / "stdout.txt").exists():
@@ -207,27 +215,52 @@ class BaseTraceLog(ABC):  # pragma: no cov
         logger.exception(msg, stacklevel=2)

     async def adebug(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the DEBUG level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         if config.debug:
             await self.awriter(msg)
         logger.info(msg, stacklevel=2)

     async def ainfo(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the INFO level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg)
         logger.info(msg, stacklevel=2)

     async def awarning(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the WARNING level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg)
         logger.warning(msg, stacklevel=2)

     async def aerror(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the ERROR level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg, is_err=True)
         logger.error(msg, stacklevel=2)

     async def aexception(self, message: str) -> None:  # pragma: no cov
+        """Async write trace log with append mode and logging this message with
+        the EXCEPTION level.
+
+        :param message: (str) A message that want to log.
+        """
         msg: str = self.make_message(message)
         await self.awriter(msg, is_err=True)
         logger.exception(msg, stacklevel=2)
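These async variants mirror the sync logging methods one level up. A minimal usage sketch; `get_trace` is an assumption here, taken to be the module's factory for building a trace logger from a running ID:

```python
import asyncio

from ddeutil.workflow.logs import get_trace

async def main() -> None:
    trace = get_trace("demo-run-id")
    await trace.ainfo("Start async execution.")  # appends stdout + logs INFO
    await trace.aerror("Something went wrong.")  # is_err=True routes to stderr

asyncio.run(main())
```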
@@ -237,23 +270,29 @@ class FileTraceLog(BaseTraceLog):  # pragma: no cov
     """Trace Log object that write file to the local storage."""

     @classmethod
-    def find_logs(cls) -> Iterator[TraceData]:  # pragma: no cov
+    def find_logs(
+        cls, path: Path | None = None
+    ) -> Iterator[TraceData]:  # pragma: no cov
+        """Find trace logs."""
         for file in sorted(
-            config.log_path.glob("./run_id=*"),
+            (path or config.log_path).glob("./run_id=*"),
             key=lambda f: f.lstat().st_mtime,
         ):
             yield TraceData.from_path(file)

     @classmethod
     def find_log_with_id(
-        cls, run_id: str, force_raise: bool = True
+        cls, run_id: str, force_raise: bool = True, *, path: Path | None = None
     ) -> TraceData:
-        file: Path = config.log_path / f"run_id={run_id}"
+        """Find trace log with an input specific run ID."""
+        base_path: Path = path or config.log_path
+        file: Path = base_path / f"run_id={run_id}"
         if file.exists():
             return TraceData.from_path(file)
         elif force_raise:
             raise FileNotFoundError(
-                f"Trace log on path 'run_id={run_id}' does not found."
+                f"Trace log on path {base_path}, does not found trace "
+                f"'run_id={run_id}'."
             )
         return {}

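The new optional `path` arguments let callers (and tests) point trace discovery at a directory other than `config.log_path`. A sketch:

```python
from pathlib import Path

from ddeutil.workflow.logs import FileTraceLog

# Iterate traces (sorted by file mtime) under a custom log directory.
for trace_data in FileTraceLog.find_logs(path=Path("./logs")):
    print(trace_data.meta)

# A missing run ID now raises FileNotFoundError naming the searched base
# path (force_raise defaults to True).
data = FileTraceLog.find_log_with_id("some-run-id", path=Path("./logs"))
```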
--- ddeutil_workflow-0.0.43/src/ddeutil/workflow/result.py
+++ ddeutil_workflow-0.0.45/src/ddeutil/workflow/result.py
@@ -72,6 +72,7 @@ class Result:
     ts: datetime = field(default_factory=get_dt_tznow, compare=False)

     trace: Optional[TraceLog] = field(default=None, compare=False, repr=False)
+    extras: DictData = field(default_factory=dict)

     @classmethod
     def construct_with_rs_or_id(
@@ -80,6 +81,8 @@ class Result:
         run_id: str | None = None,
         parent_run_id: str | None = None,
         id_logic: str | None = None,
+        *,
+        extras: DictData | None = None,
     ) -> Self:
         """Create the Result object or set parent running id if passing Result
         object.
@@ -88,16 +91,22 @@ class Result:
         :param run_id:
         :param parent_run_id:
         :param id_logic:
+        :param extras:

         :rtype: Self
         """
         if result is None:
-            result: Result = cls(
+            return cls(
                 run_id=(run_id or gen_id(id_logic or "", unique=True)),
                 parent_run_id=parent_run_id,
+                extras=(extras or {}),
             )
         elif parent_run_id:
             result.set_parent_run_id(parent_run_id)
+
+        if extras is not None:
+            result.extras.update(extras)
+
         return result

     @model_validator(mode="after")
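A sketch of the new `extras` plumbing: a fresh `Result` receives the mapping at construction, while an existing one has it merged in place (the run ID and extras values below are illustrative):

```python
from ddeutil.workflow.result import Result

rs = Result.construct_with_rs_or_id(
    None, run_id="demo", extras={"conf_path": "conf"}
)
assert rs.extras == {"conf_path": "conf"}

# Passing an existing Result updates, rather than replaces, its extras.
same = Result.construct_with_rs_or_id(rs, extras={"registry_caller": ["."]})
assert same is rs and "conf_path" in same.extras
```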
--- ddeutil_workflow-0.0.43/src/ddeutil/workflow/reusables.py
+++ ddeutil_workflow-0.0.45/src/ddeutil/workflow/reusables.py
@@ -499,6 +499,7 @@ class CallSearchData:

 def extract_call(
     call: str,
+    *,
     registries: Optional[list[str]] = None,
 ) -> Callable[[], TagFunc]:
     """Extract Call function from string value to call partial function that
--- ddeutil_workflow-0.0.43/src/ddeutil/workflow/stages.py
+++ ddeutil_workflow-0.0.45/src/ddeutil/workflow/stages.py
@@ -223,10 +223,11 @@ class BaseStage(BaseModel, ABC):
             ) from e

         errors: DictData = {"errors": to_dict(e)}
-        if to is not None:
-            return self.set_outputs(errors, to=to)
-
-        return result.catch(status=FAILED, context=errors)
+        return (
+            self.set_outputs(errors, to=to)
+            if to is not None
+            else result.catch(status=FAILED, context=errors)
+        )

     def set_outputs(self, output: DictData, to: DictData) -> DictData:
         """Set an outputs from execution process to the received context. The
@@ -326,7 +327,10 @@ class BaseAsyncStage(BaseStage):
         *,
         result: Result | None = None,
         event: Event | None = None,
-    ) -> Result: ...
+    ) -> Result:
+        raise NotImplementedError(
+            "Async Stage should implement `execute` method."
+        )

     @abstractmethod
     async def axecute(