ddeutil-workflow 0.0.47.tar.gz → 0.0.49.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. {ddeutil_workflow-0.0.47/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.49}/PKG-INFO +24 -26
  2. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/README.md +23 -25
  3. ddeutil_workflow-0.0.49/src/ddeutil/workflow/__about__.py +1 -0
  4. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/__init__.py +4 -2
  5. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/api/api.py +2 -1
  6. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/api/repeat.py +2 -1
  7. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/api/routes/job.py +1 -1
  8. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/api/routes/logs.py +6 -5
  9. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/api/routes/schedules.py +2 -1
  10. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/api/routes/workflows.py +2 -2
  11. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/conf.py +61 -66
  12. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/job.py +13 -5
  13. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/logs.py +282 -105
  14. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/result.py +19 -8
  15. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/reusables.py +4 -5
  16. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/scheduler.py +70 -50
  17. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/stages.py +288 -83
  18. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/utils.py +3 -3
  19. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/workflow.py +135 -103
  20. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49/src/ddeutil_workflow.egg-info}/PKG-INFO +24 -26
  21. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_conf.py +2 -2
  22. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_job.py +10 -11
  23. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_logs_audit.py +2 -2
  24. ddeutil_workflow-0.0.49/tests/test_logs_trace.py +6 -0
  25. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_reusables_call_tag.py +6 -4
  26. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_reusables_template_filter.py +2 -2
  27. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_stage_handler_exec.py +212 -1
  28. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_workflow_exec.py +1 -1
  29. ddeutil_workflow-0.0.47/src/ddeutil/workflow/__about__.py +0 -1
  30. ddeutil_workflow-0.0.47/tests/test_logs_trace.py +0 -6
  31. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/LICENSE +0 -0
  32. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/pyproject.toml +0 -0
  33. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/setup.cfg +0 -0
  34. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/__cron.py +0 -0
  35. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/__main__.py +0 -0
  36. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/__types.py +0 -0
  37. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/api/__init__.py +0 -0
  38. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/api/log.py +0 -0
  39. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
  40. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/cron.py +0 -0
  41. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/exceptions.py +0 -0
  42. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil/workflow/params.py +0 -0
  43. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil_workflow.egg-info/SOURCES.txt +0 -0
  44. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  45. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
  46. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  47. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test__cron.py +0 -0
  48. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test__regex.py +0 -0
  49. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_cron_on.py +0 -0
  50. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_job_exec.py +0 -0
  51. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_job_exec_strategy.py +0 -0
  52. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_job_strategy.py +0 -0
  53. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_params.py +0 -0
  54. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_release.py +0 -0
  55. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_release_queue.py +0 -0
  56. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_result.py +0 -0
  57. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_reusables_template.py +0 -0
  58. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_schedule.py +0 -0
  59. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_schedule_pending.py +0 -0
  60. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_schedule_tasks.py +0 -0
  61. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_schedule_workflow.py +0 -0
  62. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_scheduler_control.py +0 -0
  63. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_stage.py +0 -0
  64. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_utils.py +0 -0
  65. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_workflow.py +0 -0
  66. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_workflow_exec_job.py +0 -0
  67. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_workflow_exec_poke.py +0 -0
  68. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_workflow_exec_release.py +0 -0
  69. {ddeutil_workflow-0.0.47 → ddeutil_workflow-0.0.49}/tests/test_workflow_task.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.47
+ Version: 0.0.49
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -260,31 +260,29 @@ it will use default value and do not raise any error to you.
  > The config value that you will set on the environment should combine with
  > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).

- | Name | Component | Default | Override | Description |
- |:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:--------:|:-------------------------------------------------------------------------------------------------------------------|
- | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
- | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
- | **CONF_PATH** | Core | `conf` | Yes | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
- | **STAGE_DEFAULT_ID** | Core | `true` | Yes | A flag that enable default stage ID that use for catch an execution output. |
- | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
- | **JOB_DEFAULT_ID** | Core | `false` | Yes | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
- | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
- | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
- | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
- | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | No | A flog that enable generating ID with `md5` algorithm. |
- | **DEBUG_MODE** | Log | `true` | No | A flag that enable logging with debug level mode. |
- | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | No | |
- | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | No | |
- | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | No | |
- | **TRACE_PATH** | Log | `./logs` | No | The log path of the workflow saving log. |
- | **TRACE_ENABLE_WRITE** | Log | `false` | No | |
- | **AUDIT_PATH** | Log | `./audits` | No | |
- | **AUDIT_ENABLE_WRITE** | Log | `true` | No | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | No | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | No | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | No | A time delta value that use to stop scheduler app in json string format. |
+ | Name | Component | Default | Description |
+ |:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|
+ | **REGISTRY_CALLER** | Core | `.` | List of importable string for the call stage. |
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
+ | **CONF_PATH** | Core | `./conf` | The config path that keep all template `.yaml` files. |
+ | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
+ | **STAGE_DEFAULT_ID** | Core | `false` | A flag that enable default stage ID that use for catch an execution output. |
+ | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
+ | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
+ | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
+ | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
+ | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
+ | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
+ | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
+ | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
+ | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
+ | **TRACE_PATH** | Log | `./logs` | The log path of the workflow saving log. |
+ | **TRACE_ENABLE_WRITE** | Log | `false` | |
+ | **AUDIT_PATH** | Log | `./audits` | |
+ | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
+ | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
+ | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
+ | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |

  **API Application**:

@@ -217,31 +217,29 @@ it will use default value and do not raise any error to you.
  > The config value that you will set on the environment should combine with
  > prefix, component, and name which is `WORKFLOW_{component}_{name}` (Upper case).

- | Name | Component | Default | Override | Description |
- |:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:--------:|:-------------------------------------------------------------------------------------------------------------------|
- | **ROOT_PATH** | Core | `.` | No | The root path of the workflow application. |
- | **REGISTRY_CALLER** | Core | `.` | Yes | List of importable string for the call stage. |
- | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | Yes | List of importable string for the filter template. |
- | **CONF_PATH** | Core | `conf` | Yes | The config path that keep all template `.yaml` files. |
- | **TIMEZONE** | Core | `Asia/Bangkok` | No | A Timezone string value that will pass to `ZoneInfo` object. |
- | **STAGE_DEFAULT_ID** | Core | `true` | Yes | A flag that enable default stage ID that use for catch an execution output. |
- | **STAGE_RAISE_ERROR** | Core | `false` | Yes | A flag that all stage raise StageException from stage execution. |
- | **JOB_DEFAULT_ID** | Core | `false` | Yes | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
- | **JOB_RAISE_ERROR** | Core | `true` | Yes | A flag that all job raise JobException from job strategy execution. |
- | **MAX_CRON_PER_WORKFLOW** | Core | `5` | No | |
- | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | No | |
- | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | No | A flog that enable generating ID with `md5` algorithm. |
- | **DEBUG_MODE** | Log | `true` | No | A flag that enable logging with debug level mode. |
- | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | No | |
- | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | No | |
- | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | No | |
- | **TRACE_PATH** | Log | `./logs` | No | The log path of the workflow saving log. |
- | **TRACE_ENABLE_WRITE** | Log | `false` | No | |
- | **AUDIT_PATH** | Log | `./audits` | No | |
- | **AUDIT_ENABLE_WRITE** | Log | `true` | No | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | No | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | No | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | No | A time delta value that use to stop scheduler app in json string format. |
+ | Name | Component | Default | Description |
+ |:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|
+ | **REGISTRY_CALLER** | Core | `.` | List of importable string for the call stage. |
+ | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
+ | **CONF_PATH** | Core | `./conf` | The config path that keep all template `.yaml` files. |
+ | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
+ | **STAGE_DEFAULT_ID** | Core | `false` | A flag that enable default stage ID that use for catch an execution output. |
+ | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
+ | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
+ | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
+ | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
+ | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
+ | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
+ | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
+ | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
+ | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
+ | **TRACE_PATH** | Log | `./logs` | The log path of the workflow saving log. |
+ | **TRACE_ENABLE_WRITE** | Log | `false` | |
+ | **AUDIT_PATH** | Log | `./audits` | |
+ | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
+ | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
+ | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
+ | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |

  **API Application**:

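Both tables above (the PKG-INFO and README.md copies carry the same change) describe plain environment variables that follow the `WORKFLOW_{component}_{name}` naming rule. A minimal sketch of overriding a few of them before the package is imported; the values are illustrative only:

```python
import os

# Hypothetical overrides following the WORKFLOW_{component}_{name} convention
# from the tables above (component and name are upper-cased and joined with "_").
os.environ["WORKFLOW_CORE_CONF_PATH"] = "./conf"            # Core / CONF_PATH
os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"       # Core / TIMEZONE
os.environ["WORKFLOW_CORE_STAGE_RAISE_ERROR"] = "true"      # Core / STAGE_RAISE_ERROR
os.environ["WORKFLOW_LOG_TRACE_PATH"] = "./logs"            # Log / TRACE_PATH
```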
@@ -0,0 +1 @@
+ __version__: str = "0.0.49"
@@ -10,7 +10,6 @@ from .conf import (
  Loader,
  config,
  env,
- get_logger,
  )
  from .cron import (
  On,
@@ -33,8 +32,11 @@ from .job import (
  )
  from .logs import (
  Audit,
+ AuditModel,
+ Trace,
  TraceData,
- TraceLog,
+ TraceMeta,
+ TraceModel,
  get_audit,
  get_dt_tznow,
  get_trace,
@@ -20,7 +20,8 @@ from fastapi.middleware.gzip import GZipMiddleware
  from fastapi.responses import UJSONResponse

  from ..__about__ import __version__
- from ..conf import api_config, config, get_logger
+ from ..conf import api_config, config
+ from ..logs import get_logger
  from ..scheduler import ReleaseThread, ReleaseThreads
  from ..workflow import ReleaseQueue, WorkflowTask
  from .repeat import repeat_at
@@ -13,7 +13,8 @@ from functools import wraps
  from starlette.concurrency import run_in_threadpool

  from ..__cron import CronJob
- from ..conf import config, get_logger
+ from ..conf import config
+ from ..logs import get_logger

  logger = get_logger("uvicorn.error")

@@ -12,9 +12,9 @@ from fastapi.responses import UJSONResponse
  from pydantic import BaseModel

  from ...__types import DictData
- from ...conf import get_logger
  from ...exceptions import JobException
  from ...job import Job
+ from ...logs import get_logger
  from ...result import Result

  logger = get_logger("uvicorn.error")
@@ -10,7 +10,8 @@ from fastapi import APIRouter, Path, Query
  from fastapi import status as st
  from fastapi.responses import UJSONResponse

- from ...logs import get_audit, get_trace_obj
+ from ...logs import get_audit
+ from ...result import Result

  log_route = APIRouter(
  prefix="/logs",
@@ -33,6 +34,7 @@ async def get_traces(
  """Return all trace logs from the current trace log path that config with
  `WORKFLOW_LOG_PATH` environment variable name.
  """
+ result = Result()
  return {
  "message": (
  f"Getting trace logs with offset: {offset} and limit: {limit}"
@@ -44,7 +46,7 @@
  exclude_unset=True,
  exclude_defaults=True,
  )
- for trace in get_trace_obj().find_logs()
+ for trace in result.trace.find_traces()
  ],
  }

@@ -63,12 +65,11 @@ async def get_trace_with_id(run_id: str):
  - **run_id**: A running ID that want to search a trace log from the log
  path.
  """
+ result = Result()
  return {
  "message": f"Getting trace log with specific running ID: {run_id}",
  "trace": (
- get_trace_obj()
- .find_log_with_id(run_id)
- .model_dump(
+ result.trace.find_trace_with_id(run_id).model_dump(
  by_alias=True,
  exclude_none=True,
  exclude_unset=True,
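Both trace routes now go through a `Result` object instead of the removed `get_trace_obj()` helper. A rough sketch of the new access pattern as it appears in the hunks above (the running ID is a made-up value):

```python
from ddeutil.workflow.result import Result

result = Result()

# Mirror of the listing route: iterate every trace record found on the trace path.
for trace in result.trace.find_traces():
    print(trace.model_dump(by_alias=True, exclude_none=True, exclude_unset=True))

# Mirror of the detail route: look up a single trace by a (hypothetical) running ID.
detail = result.trace.find_trace_with_id("01JC0EXAMPLE").model_dump(
    by_alias=True,
    exclude_none=True,
    exclude_unset=True,
)
```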
@@ -12,7 +12,8 @@ from fastapi import APIRouter, HTTPException, Request
  from fastapi import status as st
  from fastapi.responses import UJSONResponse

- from ...conf import config, get_logger
+ from ...conf import config
+ from ...logs import get_logger
  from ...scheduler import Schedule

  logger = get_logger("uvicorn.error")
@@ -15,8 +15,8 @@ from fastapi.responses import UJSONResponse
  from pydantic import BaseModel

  from ...__types import DictData
- from ...conf import Loader, get_logger
- from ...logs import Audit, get_audit
+ from ...conf import Loader
+ from ...logs import Audit, get_audit, get_logger
  from ...result import Result
  from ...workflow import Workflow

@@ -6,13 +6,12 @@
  from __future__ import annotations

  import json
- import logging
  import os
  from collections.abc import Iterator
  from datetime import timedelta
- from functools import cached_property, lru_cache
+ from functools import cached_property
  from pathlib import Path
- from typing import Optional, TypeVar
+ from typing import Final, Optional, TypeVar
  from zoneinfo import ZoneInfo

  from ddeutil.core import str2bool
@@ -22,12 +21,15 @@ from ddeutil.io.paths import glob_files, is_ignored, read_ignore
  from .__types import DictData, TupleStr

  T = TypeVar("T")
- PREFIX: str = "WORKFLOW"
+ PREFIX: Final[str] = "WORKFLOW"


  def env(var: str, default: str | None = None) -> str | None: # pragma: no cov
  """Get environment variable with uppercase and adding prefix string.

+ :param var: (str) A env variable name.
+ :param default: (str | None) A default value if an env var does not set.
+
  :rtype: str | None
  """
  return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)
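The `env` helper above only upper-cases the name, replaces spaces, and prepends the `WORKFLOW` prefix. A standalone sketch of that mapping (reimplemented here so it runs without the package installed):

```python
from __future__ import annotations

import os

PREFIX = "WORKFLOW"


def env(var: str, default: str | None = None) -> str | None:
    # Same lookup as the helper in the hunk above.
    return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)


os.environ["WORKFLOW_CORE_TIMEZONE"] = "Asia/Bangkok"
assert env("core_timezone") == "Asia/Bangkok"          # reads WORKFLOW_CORE_TIMEZONE
assert env("LOG_TRACE_PATH", "./logs") == "./logs"     # falls back to the default
```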
@@ -36,7 +38,6 @@ def env(var: str, default: str | None = None) -> str | None: # pragma: no cov
  __all__: TupleStr = (
  "api_config",
  "env",
- "get_logger",
  "Config",
  "SimLoad",
  "Loader",
@@ -53,34 +54,32 @@ class Config: # pragma: no cov
  """

  # NOTE: Core
- @property
- def root_path(self) -> Path:
- """Root path or the project path.
-
- :rtype: Path
- """
- return Path(env("CORE_ROOT_PATH", "."))
-
  @property
  def conf_path(self) -> Path:
- """Config path that use root_path class argument for this construction.
+ """Config path that keep all workflow template YAML files.

  :rtype: Path
  """
- return self.root_path / env("CORE_CONF_PATH", "conf")
+ return Path(env("CORE_CONF_PATH", "./conf"))

  @property
  def tz(self) -> ZoneInfo:
+ """Timezone value that return with the `ZoneInfo` object and use for all
+ datetime object in this workflow engine.
+
+ :rtype: ZoneInfo
+ """
  return ZoneInfo(env("CORE_TIMEZONE", "UTC"))

  @property
- def gen_id_simple_mode(self) -> bool:
+ def generate_id_simple_mode(self) -> bool:
  return str2bool(env("CORE_GENERATE_ID_SIMPLE_MODE", "true"))

  # NOTE: Register
  @property
- def regis_call(self) -> list[str]:
- """Register Caller module importer str.
+ def registry_caller(self) -> list[str]:
+ """Register Caller that is a list of importable string for the call
+ stage model can get.

  :rtype: list[str]
  """
@@ -88,8 +87,9 @@ class Config: # pragma: no cov
  return [r.strip() for r in regis_call_str.split(",")]

  @property
- def regis_filter(self) -> list[str]:
- """Register Filter module.
+ def registry_filter(self) -> list[str]:
+ """Register Filter that is a list of importable string for the filter
+ template.

  :rtype: list[str]
  """
@@ -100,7 +100,7 @@ class Config: # pragma: no cov

  # NOTE: Log
  @property
- def log_path(self) -> Path:
+ def trace_path(self) -> Path:
  return Path(env("LOG_TRACE_PATH", "./logs"))

  @property
@@ -159,11 +159,7 @@ class Config: # pragma: no cov
  return str2bool(env("CORE_JOB_RAISE_ERROR", "true"))

  @property
- def job_default_id(self) -> bool:
- return str2bool(env("CORE_JOB_DEFAULT_ID", "false"))
-
- @property
- def max_on_per_workflow(self) -> int:
+ def max_cron_per_workflow(self) -> int:
  """The maximum on value that store in workflow model.

  :rtype: int
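The renamed `Config` properties stay reachable through the `config` singleton that `conf.py` already exports (and that `__init__.py` re-exports in an earlier hunk). A small sketch using the 0.0.49 names from these hunks:

```python
from ddeutil.workflow.conf import config

print(config.conf_path)               # Path from WORKFLOW_CORE_CONF_PATH, default ./conf
print(config.tz)                      # ZoneInfo from WORKFLOW_CORE_TIMEZONE
print(config.registry_caller)         # was config.regis_call in 0.0.47
print(config.registry_filter)         # was config.regis_filter in 0.0.47
print(config.trace_path)              # was config.log_path in 0.0.47
print(config.max_cron_per_workflow)   # was config.max_on_per_workflow in 0.0.47
```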
@@ -222,15 +218,13 @@ class SimLoad:
  :param externals: An external parameters

  Noted:
-
  The config data should have ``type`` key for modeling validation that
  make this loader know what is config should to do pass to.

  ... <identity-key>:
  ... type: <importable-object>
- ... <key-data>: <value-data>
- ... ...
-
+ ... <key-data-1>: <value-data-1>
+ ... <key-data-2>: <value-data-2>
  """

  def __init__(
@@ -253,7 +247,10 @@

  # VALIDATE: check the data that reading should not empty.
  if not self.data:
- raise ValueError(f"Config {name!r} does not found on conf path")
+ raise ValueError(
+ f"Config {name!r} does not found on conf path: "
+ f"{self.conf_path}."
+ )

  self.data.update(self.externals)

@@ -298,11 +295,34 @@ class SimLoad:


  @classmethod
- def is_ignore(cls, file: Path, conf_path: Path) -> bool:
- return is_ignored(file, read_ignore(conf_path / ".confignore"))
+ def is_ignore(
+ cls,
+ file: Path,
+ conf_path: Path,
+ *,
+ ignore_filename: Optional[str] = None,
+ ) -> bool:
+ """Check this file was ignored.
+
+ :param file: (Path) A file path that want to check.
+ :param conf_path: (Path) A config path that want to read the config
+ ignore file.
+ :param ignore_filename: (str) An ignore filename.
+
+ :rtype: bool
+ """
+ ignore_filename: str = ignore_filename or ".confignore"
+ return is_ignored(file, read_ignore(conf_path / ignore_filename))

  @classmethod
  def filter_yaml(cls, file: Path, name: str | None = None) -> DictData:
+ """Read a YAML file context from an input file path and specific name.
+
+ :param file: (Path) A file path that want to extract YAML context.
+ :param name: (str) A key name that search on a YAML context.
+
+ :rtype: DictData
+ """
  if any(file.suffix.endswith(s) for s in (".yml", ".yaml")):
  values: DictData = YamlFlResolve(file).read()
  if values is not None:
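`is_ignore` keeps `.confignore` as the default while the new keyword-only `ignore_filename` lets a caller point at a different ignore file. A hedged usage sketch; the paths and key name are placeholders:

```python
from pathlib import Path

from ddeutil.workflow.conf import SimLoad

conf_path = Path("./conf")                 # placeholder config directory
file = conf_path / "pipeline.yml"          # placeholder template file

# Default behaviour: consult <conf_path>/.confignore.
skipped = SimLoad.is_ignore(file, conf_path)

# New in 0.0.49: read a differently named ignore file instead.
skipped_alt = SimLoad.is_ignore(file, conf_path, ignore_filename=".workflowignore")

# filter_yaml extracts the YAML context for a specific key name, per the docstring above.
data = SimLoad.filter_yaml(file, name="my-workflow")
```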
@@ -352,8 +372,8 @@ def dynamic(
  class Loader(SimLoad):
  """Loader Object that get the config `yaml` file from current path.

- :param name: A name of config data that will read by Yaml Loader object.
- :param externals: An external parameters
+ :param name: (str) A name of config data that will read by Yaml Loader object.
+ :param externals: (DictData) An external parameters
  """

  @classmethod
@@ -361,17 +381,19 @@ class Loader(SimLoad):
  cls,
  obj: object,
  *,
+ path: Path | None = None,
  included: list[str] | None = None,
  excluded: list[str] | None = None,
- path: Path | None = None,
  **kwargs,
  ) -> Iterator[tuple[str, DictData]]:
  """Override the find class method from the Simple Loader object.

  :param obj: An object that want to validate matching before return.
- :param included:
- :param excluded:
- :param path:
+ :param path: (Path) A override config path.
+ :param included: An excluded list of data key that want to reject this
+ data if any key exist.
+ :param excluded: An included list of data key that want to filter from
+ data.

  :rtype: Iterator[tuple[str, DictData]]
  """
@@ -388,30 +410,3 @@
  conf_path=dynamic("conf_path", extras=externals),
  externals=externals,
  )
-
-
- @lru_cache
- def get_logger(name: str):
- """Return logger object with an input module name.
-
- :param name: A module name that want to log.
- """
- logger = logging.getLogger(name)
-
- # NOTE: Developers using this package can then disable all logging just for
- # this package by;
- #
- # `logging.getLogger('ddeutil.workflow').propagate = False`
- #
- logger.addHandler(logging.NullHandler())
-
- formatter = logging.Formatter(
- fmt=config.log_format,
- datefmt=config.log_datetime_format,
- )
- stream = logging.StreamHandler()
- stream.setFormatter(formatter)
- logger.addHandler(stream)
-
- logger.setLevel(logging.DEBUG if config.debug else logging.INFO)
- return logger
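With the cached `get_logger` dropped from `conf.py`, the API modules in the earlier hunks import it from the `logs` module instead. A minimal sketch of the updated import path; the logger name is only an example:

```python
from ddeutil.workflow.logs import get_logger  # moved out of ddeutil.workflow.conf in 0.0.49

logger = get_logger("ddeutil.workflow")
logger.info("logger now comes from the logs module")

# The note from the removed helper still applies: consumers can silence the package with
#   logging.getLogger("ddeutil.workflow").propagate = False
```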
@@ -483,7 +483,13 @@ class Job(BaseModel):
  except Exception as err:
  raise JobException(f"{err.__class__.__name__}: {err}") from err

- def set_outputs(self, output: DictData, to: DictData) -> DictData:
+ def set_outputs(
+ self,
+ output: DictData,
+ to: DictData,
+ *,
+ job_id: Optional[None] = None,
+ ) -> DictData:
  """Set an outputs from execution process to the received context. The
  result from execution will pass to value of `strategies` key.

@@ -511,22 +517,21 @@

  :param output: An output context.
  :param to: A context data that want to add output result.
+ :param job_id: A job ID if the id field does not set.

  :rtype: DictData
  """
  if "jobs" not in to:
  to["jobs"] = {}

- if self.id is None and not dynamic(
- "job_default_id", extras=self.extras
- ):
+ if self.id is None and job_id is None:
  raise JobException(
  "This job do not set the ID before setting execution output."
  )

  # NOTE: If the job ID did not set, it will use index of jobs key
  # instead.
- _id: str = self.id or str(len(to["jobs"]) + 1)
+ _id: str = self.id or job_id

  errors: DictData = (
  {"errors": output.pop("errors", {})} if "errors" in output else {}
@@ -570,6 +575,7 @@ class Job(BaseModel):
  run_id=run_id,
  parent_run_id=parent_run_id,
  id_logic=(self.id or "not-set"),
+ extras=self.extras,
  )

  if self.runs_on.type == RunsOnType.LOCAL:
@@ -756,6 +762,7 @@ def local_execute(
  run_id=run_id,
  parent_run_id=parent_run_id,
  id_logic=(job.id or "not-set"),
+ extras=job.extras,
  )
  event: Event = Event() if event is None else event

@@ -891,6 +898,7 @@ def self_hosted_execute(
  run_id=run_id,
  parent_run_id=parent_run_id,
  id_logic=(job.id or "not-set"),
+ extras=job.extras,
  )

  if event and event.is_set():