ddeutil-workflow 0.0.20__tar.gz → 0.0.22__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.20/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.22}/PKG-INFO +3 -1
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/README.md +2 -0
- ddeutil_workflow-0.0.22/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/__types.py +1 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/conf.py +7 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/job.py +82 -78
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/on.py +3 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/stage.py +19 -11
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/utils.py +18 -2
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/workflow.py +296 -172
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22/src/ddeutil_workflow.egg-info}/PKG-INFO +3 -1
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_job.py +26 -17
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_job_exec_strategy.py +48 -11
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_stage.py +1 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_utils.py +16 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_workflow.py +23 -15
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_workflow_exec.py +52 -0
- ddeutil_workflow-0.0.22/tests/test_workflow_job_exec.py +62 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_workflow_poke.py +18 -1
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_workflow_release.py +21 -1
- ddeutil_workflow-0.0.20/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.20/tests/test_workflow_job_exec.py +0 -28
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/LICENSE +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/pyproject.toml +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/__cron.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/__init__.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/api.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/cli.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/exceptions.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/repeat.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/route.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/scheduler.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil_workflow.egg-info/SOURCES.txt +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_conf.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_conf_log.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_job_exec_py.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_on.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_scheduler.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_scheduler_tasks.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_stage_exec_bash.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_stage_exec_hook.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_stage_exec_py.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_stage_exec_trigger.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_utils_filter.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_utils_params.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_utils_result.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_utils_tag.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_utils_template.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_workflow_exec_hook.py +0 -0
- {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.22}/tests/test_workflow_exec_needs.py +0 -0
--- ddeutil_workflow-0.0.20/src/ddeutil_workflow.egg-info/PKG-INFO
+++ ddeutil_workflow-0.0.22/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.20
+Version: 0.0.22
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -195,6 +195,8 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
 | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
+| `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
+| `WORKFLOW_CORE_MAX_ON_PER_WORKFLOW` | Core | 5 | | |
 | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |

--- ddeutil_workflow-0.0.20/README.md
+++ ddeutil_workflow-0.0.22/README.md
@@ -162,6 +162,8 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
 | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
+| `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
+| `WORKFLOW_CORE_MAX_ON_PER_WORKFLOW` | Core | 5 | | |
 | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
--- /dev/null
+++ ddeutil_workflow-0.0.22/src/ddeutil/workflow/__about__.py
@@ -0,0 +1 @@
+__version__: str = "0.0.22"

--- ddeutil_workflow-0.0.20/src/ddeutil/workflow/conf.py
+++ ddeutil_workflow-0.0.22/src/ddeutil/workflow/conf.py
@@ -100,9 +100,15 @@ class Config:
 
     # NOTE: Workflow
    max_job_parallel: int = int(env("WORKFLOW_CORE_MAX_JOB_PARALLEL", "2"))
+    max_job_exec_timeout: int = int(
+        env("WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT", "600")
+    )
     max_poking_pool_worker: int = int(
         os.getenv("WORKFLOW_CORE_MAX_NUM_POKING", "4")
     )
+    max_on_per_workflow: int = int(
+        env("WORKFLOW_CORE_MAX_ON_PER_WORKFLOW", "5")
+    )
 
     # NOTE: Schedule App
     max_schedule_process: int = int(env("WORKFLOW_APP_MAX_PROCESS", "2"))
@@ -459,6 +465,7 @@ class FileLog(BaseLog):
 
         :param excluded: An excluded list of key name that want to pass in the
             model_dump method.
+
         :rtype: Self
         """
         # NOTE: Check environ variable was set for real writing.

--- ddeutil_workflow-0.0.20/src/ddeutil/workflow/job.py
+++ ddeutil_workflow-0.0.22/src/ddeutil/workflow/job.py
@@ -11,7 +11,6 @@ job.
 from __future__ import annotations
 
 import copy
-import time
 from concurrent.futures import (
     FIRST_EXCEPTION,
     Future,
@@ -23,7 +22,7 @@ from enum import Enum
 from functools import lru_cache
 from textwrap import dedent
 from threading import Event
-from typing import Optional, Union
+from typing import Any, Optional, Union
 
 from ddeutil.core import freeze_args
 from pydantic import BaseModel, Field
@@ -41,6 +40,7 @@ from .stage import Stage
 from .utils import (
     Result,
     cross_product,
+    cut_id,
     dash2underscore,
     filter_func,
     gen_id,
@@ -48,13 +48,13 @@ from .utils import (
 )
 
 logger = get_logger("ddeutil.workflow")
-MatrixInclude = list[dict[str, Union[str, int]]]
-MatrixExclude = list[dict[str, Union[str, int]]]
+MatrixFilter = list[dict[str, Union[str, int]]]
 
 
 __all__: TupleStr = (
     "Strategy",
     "Job",
+    "TriggerRules",
     "make",
 )
 
@@ -63,8 +63,8 @@ __all__: TupleStr = (
 @lru_cache
 def make(
     matrix: Matrix,
-    include: MatrixInclude,
-    exclude: MatrixExclude,
+    include: MatrixFilter,
+    exclude: MatrixFilter,
 ) -> list[DictStr]:
     """Make a list of product of matrix values that already filter with
     exclude matrix and add specific matrix with include.
@@ -124,7 +124,7 @@ def make(
 
 
 class Strategy(BaseModel):
-    """Strategy
+    """Strategy model that will combine a matrix together for running the
     special job with combination of matrix data.
 
     This model does not be the part of job only because you can use it to
@@ -166,11 +166,11 @@ class Strategy(BaseModel):
             "A matrix values that want to cross product to possible strategies."
         ),
     )
-    include: MatrixInclude = Field(
+    include: MatrixFilter = Field(
         default_factory=list,
         description="A list of additional matrix that want to adds-in.",
     )
-    exclude: MatrixExclude = Field(
+    exclude: MatrixFilter = Field(
         default_factory=list,
         description="A list of exclude matrix that want to filter-out.",
     )
@@ -204,7 +204,7 @@ class Strategy(BaseModel):
 
 
 class TriggerRules(str, Enum):
-    """Trigger
+    """Trigger rules enum object."""
 
     all_success: str = "all_success"
     all_failed: str = "all_failed"
@@ -215,8 +215,15 @@ class TriggerRules(str, Enum):
     none_skipped: str = "none_skipped"
 
 
+class RunsOn(str, Enum):
+    """Runs-On enum object."""
+
+    local: str = "local"
+    docker: str = "docker"
+
+
 class Job(BaseModel):
-    """Job Pydantic model object (group of stages).
+    """Job Pydantic model object (short descripte: a group of stages).
 
     This job model allow you to use for-loop that call matrix strategy. If
     you pass matrix mapping and it able to generate, you will see it running
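`RunsOn` follows the same `str`-based `Enum` pattern as `TriggerRules`, so members compare equal to plain strings — convenient when the value comes straight out of a YAML config. This is standard-library behavior rather than anything package-specific:

```python
from enum import Enum

class RunsOn(str, Enum):
    """Runs-On enum object."""

    local: str = "local"
    docker: str = "docker"

print(RunsOn("docker") is RunsOn.docker)  # True: lookup by raw string value
print(RunsOn.local == "local")            # True: str subclass compares as text
```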
@@ -327,7 +334,10 @@ class Job(BaseModel):
         return self
 
     def stage(self, stage_id: str) -> Stage:
-        """Return stage
+        """Return stage instance that exists in this job via passing an input
+        stage ID.
+
+        :raise ValueError: If an input stage ID does not found on this job.
 
         :param stage_id: A stage ID that want to extract from this job.
         :rtype: Stage
@@ -337,6 +347,13 @@ class Job(BaseModel):
                 return stage
         raise ValueError(f"Stage ID {stage_id} does not exists")
 
+    def check_needs(self, jobs: dict[str, Any]) -> bool:
+        """Return True if job's need exists in an input list of job's ID.
+
+        :rtype: bool
+        """
+        return all(need in jobs for need in self.needs)
+
     def set_outputs(self, output: DictData, to: DictData) -> DictData:
         """Set an outputs from execution process to the receive context. The
             result from execution will pass to value of ``strategies`` key.
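The new `check_needs` is a plain containment test: a job becomes runnable once every ID in its `needs` list appears among the jobs already passed in. The same expression, isolated:

```python
# Job.check_needs reduces to this single expression.
def check_needs(needs: list[str], jobs: dict) -> bool:
    return all(need in jobs for need in needs)

finished = {"extract": {...}, "transform": {...}}
print(check_needs(["extract", "transform"], finished))  # True
print(check_needs(["extract", "load"], finished))       # False: "load" not done
```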
@@ -360,8 +377,12 @@ class Job(BaseModel):
             }
         }
 
+        :raise JobException: If the job's ID does not set and the setting
+            default job ID flag does not set.
+
         :param output: An output context.
         :param to: A context data that want to add output result.
+
         :rtype: DictData
         """
         if self.id is None and not config.job_default_id:
@@ -387,8 +408,8 @@ class Job(BaseModel):
         self,
         strategy: DictData,
         params: DictData,
-        run_id: str | None = None,
         *,
+        run_id: str | None = None,
         event: Event | None = None,
     ) -> Result:
         """Job Strategy execution with passing dynamic parameters from the
@@ -398,11 +419,15 @@ class Job(BaseModel):
         It different with ``self.execute`` because this method run only one
         strategy and return with context of this strategy data.
 
+        The result of this execution will return result with strategy ID
+        that generated from the `gen_id` function with a input strategy value.
+
         :raise JobException: If it has any error from ``StageException`` or
             ``UtilException``.
 
-        :param strategy: A metrix
-
+        :param strategy: A strategy metrix value that use on this execution.
+            This value will pass to the `matrix` key for templating.
+        :param params: A dynamic parameters that will deepcopy to the context.
         :param run_id: A job running ID for this strategy execution.
         :param event: An manger event that pass to the PoolThreadExecutor.
 
@@ -410,6 +435,7 @@ class Job(BaseModel):
         """
         run_id: str = run_id or gen_id(self.id or "", unique=True)
         strategy_id: str = gen_id(strategy)
+        rs: Result = Result(run_id=run_id)
 
         # PARAGRAPH:
         #
@@ -430,21 +456,27 @@ class Job(BaseModel):
         for stage in self.stages:
 
             if stage.is_skipped(params=context):
-                logger.info(f"({run_id}) [JOB]: Skip stage: {stage.iden!r}")
+                logger.info(
+                    f"({cut_id(run_id)}) [JOB]: Skip stage: {stage.iden!r}"
+                )
                 continue
 
             logger.info(
-                f"({run_id}) [JOB]: Execute stage: {stage.iden!r}"
+                f"({cut_id(run_id)}) [JOB]: Execute stage: {stage.iden!r}"
             )
 
             # NOTE: Logging a matrix that pass on this stage execution.
             if strategy:
-                logger.info(f"({run_id}) [JOB]: Matrix: {strategy}")
+                logger.info(f"({cut_id(run_id)}) [JOB]: ... Matrix: {strategy}")
 
             # NOTE: Force stop this execution if event was set from main
             #   execution.
             if event and event.is_set():
-                return Result(
+                error_msg: str = (
+                    "Job strategy was canceled from event that had set before "
+                    "strategy execution."
+                )
+                return rs.catch(
                     status=1,
                     context={
                         strategy_id: {
@@ -453,18 +485,12 @@ class Job(BaseModel):
                             #   it will not filter function object from context.
                             # ---
                             # "stages": filter_func(context.pop("stages", {})),
+                            #
                             "stages": context.pop("stages", {}),
-                            "error": JobException(
-                                "Job strategy was canceled from trigger event "
-                                "that had stopped before execution."
-                            ),
-                            "error_message": (
-                                "Job strategy was canceled from trigger event "
-                                "that had stopped before execution."
-                            ),
+                            "error": JobException(error_msg),
+                            "error_message": error_msg,
                         },
                     },
-                    run_id=run_id,
                 )
 
         # PARAGRAPH:
@@ -492,14 +518,14 @@ class Job(BaseModel):
                 )
             except (StageException, UtilException) as err:
                 logger.error(
-                    f"({run_id}) [JOB]: {err.__class__.__name__}: {err}"
+                    f"({cut_id(run_id)}) [JOB]: {err.__class__.__name__}: {err}"
                 )
                 if config.job_raise_error:
                     raise JobException(
                         f"Get stage execution error: {err.__class__.__name__}: "
                         f"{err}"
                     ) from None
-                return Result(
+                return rs.catch(
                     status=1,
                     context={
                         strategy_id: {
@@ -509,13 +535,12 @@ class Job(BaseModel):
                             "error_message": f"{err.__class__.__name__}: {err}",
                         },
                     },
-                    run_id=run_id,
                 )
 
-            # NOTE: Remove the current stage object.
+            # NOTE: Remove the current stage object for saving memory.
             del stage
 
-        return Result(
+        return rs.catch(
             status=0,
             context={
                 strategy_id: {
@@ -523,7 +548,6 @@ class Job(BaseModel):
                     "stages": filter_func(context.pop("stages", {})),
                 },
             },
-            run_id=run_id,
         )
 
     def execute(self, params: DictData, run_id: str | None = None) -> Result:
@@ -542,7 +566,8 @@ class Job(BaseModel):
         run_id: str = run_id or gen_id(self.id or "", unique=True)
         context: DictData = {}
 
-        # NOTE: Normal Job execution without parallel strategy.
+        # NOTE: Normal Job execution without parallel strategy matrix. It use
+        #   for-loop to control strategy execution sequentially.
         if (not self.strategy.is_set()) or self.strategy.max_parallel == 1:
             for strategy in self.strategy.make():
                 rs: Result = self.execute_strategy(
@@ -566,6 +591,7 @@ class Job(BaseModel):
             max_workers=self.strategy.max_parallel,
             thread_name_prefix="job_strategy_exec_",
         ) as executor:
+
             futures: list[Future] = [
                 executor.submit(
                     self.execute_strategy,
@@ -577,11 +603,8 @@ class Job(BaseModel):
                 for strategy in self.strategy.make()
             ]
 
-            # NOTE: Dynamic catching futures object with fail-fast flag.
             return (
-                self.__catch_fail_fast(
-                    event=event, futures=futures, run_id=run_id
-                )
+                self.__catch_fail_fast(event, futures=futures, run_id=run_id)
                 if self.strategy.fail_fast
                 else self.__catch_all_completed(futures=futures, run_id=run_id)
             )
@@ -593,36 +616,32 @@ class Job(BaseModel):
         run_id: str,
         *,
         timeout: int = 1800,
-        result_timeout: int = 60,
     ) -> Result:
         """Job parallel pool futures catching with fail-fast mode. That will
-        stop all not done futures if it receive the first
-        running futures.
+        stop and set event on all not done futures if it receive the first
+        exception from all running futures.
 
         :param event: An event manager instance that able to set stopper on the
-            observing
+            observing multithreading.
         :param futures: A list of futures.
         :param run_id: A job running ID from execution.
         :param timeout: A timeout to waiting all futures complete.
-        :param result_timeout: A timeout to waiting the result
-            instance when it was running completely.
+
         :rtype: Result
         """
-        rs_final: Result = Result()
+        rs_final: Result = Result(run_id=run_id)
         context: DictData = {}
         status: int = 0
 
         # NOTE: Get results from a collection of tasks with a timeout that has
         #   the first exception.
         done, not_done = wait(
-            futures,
-            timeout=timeout,
-            return_when=FIRST_EXCEPTION,
+            futures, timeout=timeout, return_when=FIRST_EXCEPTION
         )
         nd: str = (
             f", the strategies do not run is {not_done}" if not_done else ""
         )
-        logger.debug(f"({run_id}) [JOB]: Strategy is set Fail Fast{nd}")
+        logger.debug(f"({cut_id(run_id)}) [JOB]: Strategy is set Fail Fast{nd}")
 
         # NOTE:
         #   Stop all running tasks with setting the event manager and cancel
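The fail-fast path is the stock `concurrent.futures` recipe: block until the first exception with `wait(..., return_when=FIRST_EXCEPTION)`, then set a shared `Event` so running strategies can stop cooperatively, and cancel whatever has not started. A self-contained sketch of that recipe (illustrative names, not the package's code):

```python
from concurrent.futures import FIRST_EXCEPTION, ThreadPoolExecutor, wait
from threading import Event

def work(n: int, event: Event) -> int:
    # Cooperative cancellation: each task checks the shared event.
    if event.is_set():
        raise RuntimeError(f"task {n} canceled")
    if n == 2:
        raise ValueError("boom")
    return n * n

event = Event()
with ThreadPoolExecutor(max_workers=4) as executor:
    futures = [executor.submit(work, n, event) for n in range(8)]

    # Returns as soon as any future finishes by raising.
    done, not_done = wait(futures, timeout=30, return_when=FIRST_EXCEPTION)

    # Signal running tasks and cancel the ones not yet started.
    event.set()
    for future in not_done:
        future.cancel()

    for future in done:
        if err := future.exception():
            print("caught:", err)
```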
@@ -635,11 +654,13 @@ class Job(BaseModel):
 
         future: Future
         for future in done:
+
+            # NOTE: Handle the first exception from feature
             if err := future.exception():
                 status: int = 1
                 logger.error(
-                    f"({run_id}) [JOB]: Fail-fast catching:\n\t"
-                    f"{future.exception()}"
+                    f"({cut_id(run_id)}) [JOB]: Fail-fast catching:\n\t"
+                    f"{future.exception()}"
                 )
                 context.update(
                     {
@@ -650,7 +671,7 @@ class Job(BaseModel):
                 continue
 
             # NOTE: Update the result context to main job context.
-            context.update(future.result(timeout=result_timeout).context)
+            context.update(future.result().context)
 
         return rs_final.catch(status=status, context=context)
 
@@ -660,45 +681,27 @@ class Job(BaseModel):
         run_id: str,
         *,
         timeout: int = 1800,
-        result_timeout: int = 60,
     ) -> Result:
         """Job parallel pool futures catching with all-completed mode.
 
-        :param futures: A list of futures
-            result.
+        :param futures: A list of futures.
         :param run_id: A job running ID from execution.
         :param timeout: A timeout to waiting all futures complete.
-        :param result_timeout: A timeout to waiting the result
-            instance when it was running completely.
+
         :rtype: Result
         """
-        rs_final: Result = Result()
+        rs_final: Result = Result(run_id=run_id)
         context: DictData = {}
         status: int = 0
+
         for future in as_completed(futures, timeout=timeout):
             try:
-                context.update(future.result(timeout=result_timeout).context)
-            except TimeoutError:  # pragma: no cov
-                status = 1
-                logger.warning(
-                    f"({run_id}) [JOB]: Task is hanging. Attempting to "
-                    f"kill."
-                )
-                future.cancel()
-                time.sleep(0.1)
-
-                stmt: str = (
-                    "Failed to cancel the task."
-                    if not future.cancelled()
-                    else "Task canceled successfully."
-                )
-                logger.warning(f"({run_id}) [JOB]: {stmt}")
+                context.update(future.result().context)
             except JobException as err:
                 status = 1
                 logger.error(
-                    f"({run_id}) [JOB]: All-completed catching:\n\t"
-                    f"{err.__class__.__name__}:\n\t"
-                    f"{err}"
+                    f"({cut_id(run_id)}) [JOB]: All-completed catching:\n\t"
+                    f"{err.__class__.__name__}:\n\t{err}"
                 )
                 context.update(
                     {
@@ -706,4 +709,5 @@ class Job(BaseModel):
                         "error_message": f"{err.__class__.__name__}: {err}",
                     },
                 )
+
         return rs_final.catch(status=status, context=context)
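The all-completed counterpart simply drains every future with `as_completed`, merging each strategy's context into one mapping and flipping the status when any strategy raised. A rough equivalent, with a plain `dict` standing in for the package's `Result`:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

def strategy_exec(n: int) -> dict:
    # Stand-in for Job.execute_strategy: returns a context keyed by strategy.
    if n == 3:
        raise RuntimeError(f"strategy {n} failed")
    return {f"strategy-{n}": {"matrix": {"n": n}}}

context: dict = {}
status = 0
with ThreadPoolExecutor(max_workers=2) as executor:
    futures = [executor.submit(strategy_exec, n) for n in range(5)]
    # Unlike fail-fast, keep collecting until every future has finished.
    for future in as_completed(futures, timeout=30):
        try:
            context.update(future.result())
        except RuntimeError as err:
            status = 1
            context.update({"error_message": str(err)})

print(status, sorted(context))
```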
--- ddeutil_workflow-0.0.20/src/ddeutil/workflow/stage.py
+++ ddeutil_workflow-0.0.22/src/ddeutil/workflow/stage.py
@@ -55,6 +55,7 @@ from .utils import (
     Registry,
     Result,
     TagFunc,
+    cut_id,
     gen_id,
     make_exec,
     make_registry,
@@ -124,13 +125,16 @@ def handler_result(message: str | None = None) -> DecoratorResult:
         run_id: str = gen_id(self.name + (self.id or ""), unique=True)
         kwargs["run_id"] = run_id
 
+        rs_raise: Result = Result(status=1, run_id=run_id)
+
         try:
             # NOTE: Start calling origin function with a passing args.
             return func(self, *args, **kwargs)
         except Exception as err:
             # NOTE: Start catching error from the stage execution.
             logger.error(
-                f"({run_id}) [STAGE]: {err.__class__.__name__}: {err}"
+                f"({cut_id(run_id)}) [STAGE]: {err.__class__.__name__}: "
+                f"{err}"
             )
             if config.stage_raise_error:
                 # NOTE: If error that raise from stage execution course by
@@ -147,13 +151,12 @@ def handler_result(message: str | None = None) -> DecoratorResult:
 
             # NOTE: Catching exception error object to result with
             #   error_message and error keys.
-            return Result(
+            return rs_raise.catch(
                 status=1,
                 context={
                     "error": err,
                     "error_message": f"{err.__class__.__name__}: {err}",
                 },
-                run_id=run_id,
             )
 
     return wrapped
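`handler_result` wraps each stage's `execute` so an exception either re-raises (when the stage raise-error flag is on) or is folded into a failed `Result` — now seeded with the `run_id` through the new `rs_raise` object. A stripped-down sketch of that decorator shape, with a minimal stand-in for `Result` (not the package's class):

```python
import functools
from dataclasses import dataclass, field

@dataclass
class MiniResult:
    # Minimal stand-in for the package's Result object.
    status: int = 2
    context: dict = field(default_factory=dict)

    def catch(self, status: int, context: dict) -> "MiniResult":
        self.status = status
        self.context.update(context)
        return self

def handler_result(func):
    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as err:
            # Fold the exception into a failed Result instead of propagating.
            return MiniResult().catch(
                status=1,
                context={"error": err, "error_message": f"{err.__class__.__name__}: {err}"},
            )
    return wrapped

@handler_result
def execute(params: dict) -> MiniResult:
    raise ValueError("bad template")

rs = execute({})
print(rs.status, rs.context["error_message"])  # 1 ValueError: bad template
```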
@@ -339,7 +342,7 @@ class EmptyStage(BaseStage):
         :rtype: Result
         """
         logger.info(
-            f"({run_id}) [STAGE]: Empty-Execute: {self.name!r}: "
+            f"({cut_id(run_id)}) [STAGE]: Empty-Execute: {self.name!r}: "
             f"( {param2template(self.echo, params=params) or '...'} )"
         )
         if self.sleep > 0:
@@ -393,7 +396,9 @@ class BashStage(BaseStage):
         f_name: str = f"{run_id}.sh"
         f_shebang: str = "bash" if sys.platform.startswith("win") else "sh"
 
-        logger.debug(f"({run_id}) [STAGE]: Start create `{f_name}` file.")
+        logger.debug(
+            f"({cut_id(run_id)}) [STAGE]: Start create `{f_name}` file."
+        )
 
         with open(f"./{f_name}", mode="w", newline="\n") as f:
             # NOTE: write header of `.sh` file
@@ -425,7 +430,7 @@ class BashStage(BaseStage):
         """
         bash: str = param2template(dedent(self.bash), params)
 
-        logger.info(f"({run_id}) [STAGE]: Shell-Execute: {self.name}")
+        logger.info(f"({cut_id(run_id)}) [STAGE]: Shell-Execute: {self.name}")
         with self.create_sh_file(
             bash=bash, env=param2template(self.env, params), run_id=run_id
         ) as sh:
@@ -496,6 +501,7 @@ class PyStage(BaseStage):
 
         :param output: A output data that want to extract to an output key.
         :param to: A context data that want to add output result.
+
         :rtype: DictData
         """
         # NOTE: The output will fileter unnecessary keys from locals.
@@ -534,7 +540,7 @@ class PyStage(BaseStage):
         lc: DictData = {}
 
         # NOTE: Start exec the run statement.
-        logger.info(f"({run_id}) [STAGE]: Py-Execute: {self.name}")
+        logger.info(f"({cut_id(run_id)}) [STAGE]: Py-Execute: {self.name}")
 
         # WARNING: The exec build-in function is vary dangerous. So, it
         #   should us the re module to validate exec-string before running.
@@ -638,8 +644,7 @@ class HookStage(BaseStage):
 
         :rtype: Result
         """
-
-        t_func: TagFunc = extract_hook(t_func_hook)()
+        t_func: TagFunc = extract_hook(param2template(self.uses, params))()
 
         # VALIDATE: check input task caller parameters that exists before
         #   calling.
@@ -660,7 +665,8 @@ class HookStage(BaseStage):
             args[k] = args.pop(k.removeprefix("_"))
 
         logger.info(
-            f"({run_id}) [STAGE]: Hook-Execute: {t_func.name}@{t_func.tag}"
+            f"({cut_id(run_id)}) [STAGE]: Hook-Execute: "
+            f"{t_func.name}@{t_func.tag}"
         )
         rs: DictData = t_func(**param2template(args, params))
 
@@ -716,7 +722,9 @@ class TriggerStage(BaseStage):
         # NOTE: Set running workflow ID from running stage ID to external
         #   params on Loader object.
         wf: Workflow = Workflow.from_loader(name=_trigger)
-        logger.info(f"({run_id}) [STAGE]: Trigger-Execute: {_trigger!r}")
+        logger.info(
+            f"({cut_id(run_id)}) [STAGE]: Trigger-Execute: {_trigger!r}"
+        )
         return wf.execute(
             params=param2template(self.params, params),
             run_id=run_id,

--- ddeutil_workflow-0.0.20/src/ddeutil/workflow/utils.py
+++ ddeutil_workflow-0.0.22/src/ddeutil/workflow/utils.py
@@ -430,7 +430,7 @@ class Result:
         return self
 
 
-def make_exec(path: str | Path) -> None:
+def make_exec(path: str | Path) -> None:
     """Change mode of file to be executable file.
 
     :param path: A file path that want to make executable permission.
@@ -451,7 +451,9 @@ FILTERS: dict[str, callable] = {  # pragma: no cov
 
 
 class FilterFunc(Protocol):
-    """Tag Function Protocol"""
+    """Tag Function Protocol. This protocol that use to represent any callable
+    object that able to access the name attribute.
+    """
 
     name: str
 
@@ -814,3 +816,17 @@ def batch(iterable: Iterator[Any], n: int) -> Iterator[Any]:
 
 def queue2str(queue: list[datetime]) -> Iterator[str]:  # pragma: no cov
     return (f"{q:%Y-%m-%d %H:%M:%S}" for q in queue)
+
+
+def cut_id(run_id: str, *, num: int = 6):
+    """Cutting running ID with length.
+
+    Example:
+        >>> cut_id(run_id='668931127320241228100331254567')
+        '254567'
+
+    :param run_id:
+    :param num:
+    :return:
+    """
+    return run_id[-num:]