ddeutil-workflow 0.0.21__tar.gz → 0.0.22__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.21/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.22}/PKG-INFO +2 -1
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/README.md +1 -0
- ddeutil_workflow-0.0.22/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/conf.py +4 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/job.py +26 -16
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/on.py +3 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/stage.py +17 -9
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/utils.py +18 -2
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/workflow.py +188 -112
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22/src/ddeutil_workflow.egg-info}/PKG-INFO +2 -1
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_job.py +5 -1
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_utils.py +16 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_workflow.py +2 -3
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_workflow_exec.py +1 -1
- ddeutil_workflow-0.0.22/tests/test_workflow_job_exec.py +62 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_workflow_poke.py +18 -1
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_workflow_release.py +21 -1
- ddeutil_workflow-0.0.21/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.21/tests/test_workflow_job_exec.py +0 -28
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/LICENSE +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/pyproject.toml +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/__cron.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/__init__.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/api.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/cli.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/exceptions.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/repeat.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/route.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil/workflow/scheduler.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil_workflow.egg-info/SOURCES.txt +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_conf.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_conf_log.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_job_exec_py.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_job_exec_strategy.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_on.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_scheduler.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_scheduler_tasks.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_stage.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_stage_exec_bash.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_stage_exec_hook.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_stage_exec_py.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_stage_exec_trigger.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_utils_filter.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_utils_params.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_utils_result.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_utils_tag.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_utils_template.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_workflow_exec_hook.py +0 -0
- {ddeutil_workflow-0.0.21 → ddeutil_workflow-0.0.22}/tests/test_workflow_exec_needs.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.21
+Version: 0.0.22
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -196,6 +196,7 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_MAX_NUM_POKING`          | Core | 4    | .    | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL`        | Core | 2    | The maximum job number that able to run parallel in workflow executor. | |
 | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT`    | Core | 600  |      | |
+| `WORKFLOW_CORE_MAX_ON_PER_WORKFLOW`     | Core | 5    |      | |
 | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE`               | Log  | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE`             | Log  | true | A flag that enable logging object saving log to its destination. | |
@@ -163,6 +163,7 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_MAX_NUM_POKING`          | Core | 4    | .    | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL`        | Core | 2    | The maximum job number that able to run parallel in workflow executor. | |
 | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT`    | Core | 600  |      | |
+| `WORKFLOW_CORE_MAX_ON_PER_WORKFLOW`     | Core | 5    |      | |
 | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE`               | Log  | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE`             | Log  | true | A flag that enable logging object saving log to its destination. | |
@@ -0,0 +1 @@
+__version__: str = "0.0.22"
@@ -106,6 +106,9 @@ class Config:
     max_poking_pool_worker: int = int(
         os.getenv("WORKFLOW_CORE_MAX_NUM_POKING", "4")
     )
+    max_on_per_workflow: int = int(
+        env("WORKFLOW_CORE_MAX_ON_PER_WORKFLOW", "5")
+    )

     # NOTE: Schedule App
     max_schedule_process: int = int(env("WORKFLOW_APP_MAX_PROCESS", "2"))
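The hunk above adds a `max_on_per_workflow` setting resolved from the `WORKFLOW_CORE_MAX_ON_PER_WORKFLOW` environment variable (default `5`). A minimal sketch of how such a class-level setting resolves, assuming the diff's `env(...)` helper behaves like `os.getenv(...)`; the `Config` below is a stand-in, not the package's real class:

```python
import os

# Hypothetical demo: set the variable before the class body runs, because the
# value is read once at class-definition time.
os.environ.setdefault("WORKFLOW_CORE_MAX_ON_PER_WORKFLOW", "5")


class Config:
    """Stand-in for the package's Config; the diff's env(...) helper is
    assumed to behave like os.getenv(...)."""

    # New in 0.0.22: cap how many `on` schedules a single workflow may declare.
    max_on_per_workflow: int = int(
        os.getenv("WORKFLOW_CORE_MAX_ON_PER_WORKFLOW", "5")
    )


print(Config.max_on_per_workflow)  # -> 5
```

Because the value is read at class-definition time, the variable has to be set before the module defining the config object is imported.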
@@ -462,6 +465,7 @@ class FileLog(BaseLog):

         :param excluded: An excluded list of key name that want to pass in the
             model_dump method.
+
         :rtype: Self
         """
         # NOTE: Check environ variable was set for real writing.
@@ -22,7 +22,7 @@ from enum import Enum
 from functools import lru_cache
 from textwrap import dedent
 from threading import Event
-from typing import Optional, Union
+from typing import Any, Optional, Union

 from ddeutil.core import freeze_args
 from pydantic import BaseModel, Field
@@ -40,6 +40,7 @@ from .stage import Stage
 from .utils import (
     Result,
     cross_product,
+    cut_id,
     dash2underscore,
     filter_func,
     gen_id,
@@ -346,6 +347,13 @@ class Job(BaseModel):
                 return stage
         raise ValueError(f"Stage ID {stage_id} does not exists")

+    def check_needs(self, jobs: dict[str, Any]) -> bool:
+        """Return True if job's need exists in an input list of job's ID.
+
+        :rtype: bool
+        """
+        return all(need in jobs for need in self.needs)
+
     def set_outputs(self, output: DictData, to: DictData) -> DictData:
         """Set an outputs from execution process to the receive context. The
         result from execution will pass to value of ``strategies`` key.
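The new `Job.check_needs` method is fully visible in the hunk above. As a rough standalone illustration of its semantics (the names below are hypothetical), it answers whether every job ID listed in `needs` is already present in a mapping of jobs, presumably to decide whether a dependent job is ready to run:

```python
from typing import Any


def check_needs(needs: list[str], jobs: dict[str, Any]) -> bool:
    """Standalone version of the logic added to Job.check_needs: every job ID
    listed in `needs` must already be a key in `jobs`."""
    return all(need in jobs for need in needs)


# Hypothetical usage: `jobs` maps job IDs that have already produced output.
finished = {"extract": {"status": 0}, "transform": {"status": 0}}
print(check_needs(["extract", "transform"], finished))  # True
print(check_needs(["extract", "load"], finished))       # False
```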
@@ -427,6 +435,7 @@ class Job(BaseModel):
         """
         run_id: str = run_id or gen_id(self.id or "", unique=True)
         strategy_id: str = gen_id(strategy)
+        rs: Result = Result(run_id=run_id)

         # PARAGRAPH:
         #
@@ -447,14 +456,18 @@ class Job(BaseModel):
         for stage in self.stages:

             if stage.is_skipped(params=context):
-                logger.info(
+                logger.info(
+                    f"({cut_id(run_id)}) [JOB]: Skip stage: {stage.iden!r}"
+                )
                 continue

-            logger.info(
+            logger.info(
+                f"({cut_id(run_id)}) [JOB]: Execute stage: {stage.iden!r}"
+            )

             # NOTE: Logging a matrix that pass on this stage execution.
             if strategy:
-                logger.info(f"({run_id}) [JOB]: ... Matrix: {strategy}")
+                logger.info(f"({cut_id(run_id)}) [JOB]: ... Matrix: {strategy}")

             # NOTE: Force stop this execution if event was set from main
             #   execution.
@@ -463,7 +476,7 @@ class Job(BaseModel):
                     "Job strategy was canceled from event that had set before "
                     "strategy execution."
                 )
-                return
+                return rs.catch(
                     status=1,
                     context={
                         strategy_id: {
@@ -478,7 +491,6 @@ class Job(BaseModel):
                             "error_message": error_msg,
                         },
                     },
-                    run_id=run_id,
                 )

         # PARAGRAPH:
|
@@ -506,14 +518,14 @@ class Job(BaseModel):
|
|
506
518
|
)
|
507
519
|
except (StageException, UtilException) as err:
|
508
520
|
logger.error(
|
509
|
-
f"({run_id}) [JOB]: {err.__class__.__name__}: {err}"
|
521
|
+
f"({cut_id(run_id)}) [JOB]: {err.__class__.__name__}: {err}"
|
510
522
|
)
|
511
523
|
if config.job_raise_error:
|
512
524
|
raise JobException(
|
513
525
|
f"Get stage execution error: {err.__class__.__name__}: "
|
514
526
|
f"{err}"
|
515
527
|
) from None
|
516
|
-
return
|
528
|
+
return rs.catch(
|
517
529
|
status=1,
|
518
530
|
context={
|
519
531
|
strategy_id: {
|
@@ -523,13 +535,12 @@ class Job(BaseModel):
|
|
523
535
|
"error_message": f"{err.__class__.__name__}: {err}",
|
524
536
|
},
|
525
537
|
},
|
526
|
-
run_id=run_id,
|
527
538
|
)
|
528
539
|
|
529
540
|
# NOTE: Remove the current stage object for saving memory.
|
530
541
|
del stage
|
531
542
|
|
532
|
-
return
|
543
|
+
return rs.catch(
|
533
544
|
status=0,
|
534
545
|
context={
|
535
546
|
strategy_id: {
|
@@ -537,7 +548,6 @@ class Job(BaseModel):
|
|
537
548
|
"stages": filter_func(context.pop("stages", {})),
|
538
549
|
},
|
539
550
|
},
|
540
|
-
run_id=run_id,
|
541
551
|
)
|
542
552
|
|
543
553
|
def execute(self, params: DictData, run_id: str | None = None) -> Result:
|
@@ -619,7 +629,7 @@ class Job(BaseModel):

         :rtype: Result
         """
-        rs_final: Result = Result()
+        rs_final: Result = Result(run_id=run_id)
         context: DictData = {}
         status: int = 0

@@ -631,7 +641,7 @@ class Job(BaseModel):
         nd: str = (
             f", the strategies do not run is {not_done}" if not_done else ""
         )
-        logger.debug(f"({run_id}) [JOB]: Strategy is set Fail Fast{nd}")
+        logger.debug(f"({cut_id(run_id)}) [JOB]: Strategy is set Fail Fast{nd}")

         # NOTE:
         # Stop all running tasks with setting the event manager and cancel
@@ -649,7 +659,7 @@ class Job(BaseModel):
             if err := future.exception():
                 status: int = 1
                 logger.error(
-                    f"({run_id}) [JOB]: Fail-fast catching:\n\t"
+                    f"({cut_id(run_id)}) [JOB]: Fail-fast catching:\n\t"
                     f"{future.exception()}"
                 )
                 context.update(
|
|
680
690
|
|
681
691
|
:rtype: Result
|
682
692
|
"""
|
683
|
-
rs_final: Result = Result()
|
693
|
+
rs_final: Result = Result(run_id=run_id)
|
684
694
|
context: DictData = {}
|
685
695
|
status: int = 0
|
686
696
|
|
@@ -690,7 +700,7 @@ class Job(BaseModel):
         except JobException as err:
             status = 1
             logger.error(
-                f"({run_id}) [JOB]: All-completed catching:\n\t"
+                f"({cut_id(run_id)}) [JOB]: All-completed catching:\n\t"
                 f"{err.__class__.__name__}:\n\t{err}"
             )
             context.update(
@@ -55,6 +55,7 @@ from .utils import (
     Registry,
     Result,
     TagFunc,
+    cut_id,
     gen_id,
     make_exec,
     make_registry,
@@ -124,13 +125,16 @@ def handler_result(message: str | None = None) -> DecoratorResult:
         run_id: str = gen_id(self.name + (self.id or ""), unique=True)
         kwargs["run_id"] = run_id

+        rs_raise: Result = Result(status=1, run_id=run_id)
+
         try:
             # NOTE: Start calling origin function with a passing args.
             return func(self, *args, **kwargs)
         except Exception as err:
             # NOTE: Start catching error from the stage execution.
             logger.error(
-                f"({run_id}) [STAGE]: {err.__class__.__name__}:
+                f"({cut_id(run_id)}) [STAGE]: {err.__class__.__name__}: "
+                f"{err}"
             )
             if config.stage_raise_error:
                 # NOTE: If error that raise from stage execution course by
|
@@ -147,13 +151,12 @@ def handler_result(message: str | None = None) -> DecoratorResult:
|
|
147
151
|
|
148
152
|
# NOTE: Catching exception error object to result with
|
149
153
|
# error_message and error keys.
|
150
|
-
return
|
154
|
+
return rs_raise.catch(
|
151
155
|
status=1,
|
152
156
|
context={
|
153
157
|
"error": err,
|
154
158
|
"error_message": f"{err.__class__.__name__}: {err}",
|
155
159
|
},
|
156
|
-
run_id=run_id,
|
157
160
|
)
|
158
161
|
|
159
162
|
return wrapped
|
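In `handler_result`, the error path now returns `rs_raise.catch(...)` from a `Result` pre-built with the run ID instead of constructing one inline. A trimmed, hypothetical sketch of that wrap-and-catch decorator shape, returning a plain dict where the real decorator returns a `Result`:

```python
import functools
import logging
from typing import Any, Callable

logger = logging.getLogger(__name__)


def handler_result(func: Callable[..., dict[str, Any]]) -> Callable[..., dict[str, Any]]:
    """Trimmed, hypothetical sketch of the decorator shape: run the stage body
    and convert any exception into an error payload instead of letting it
    propagate."""

    @functools.wraps(func)
    def wrapped(*args: Any, **kwargs: Any) -> dict[str, Any]:
        try:
            return func(*args, **kwargs)
        except Exception as err:
            logger.error("[STAGE]: %s: %s", err.__class__.__name__, err)
            return {
                "status": 1,
                "error": err,
                "error_message": f"{err.__class__.__name__}: {err}",
            }

    return wrapped


@handler_result
def boom() -> dict[str, Any]:
    raise ValueError("demo failure")


print(boom()["error_message"])  # ValueError: demo failure
```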
@@ -339,7 +342,7 @@ class EmptyStage(BaseStage):
         :rtype: Result
         """
         logger.info(
-            f"({run_id}) [STAGE]: Empty-Execute: {self.name!r}: "
+            f"({cut_id(run_id)}) [STAGE]: Empty-Execute: {self.name!r}: "
             f"( {param2template(self.echo, params=params) or '...'} )"
         )
         if self.sleep > 0:
@@ -393,7 +396,9 @@ class BashStage(BaseStage):
         f_name: str = f"{run_id}.sh"
         f_shebang: str = "bash" if sys.platform.startswith("win") else "sh"

-        logger.debug(
+        logger.debug(
+            f"({cut_id(run_id)}) [STAGE]: Start create `{f_name}` file."
+        )

         with open(f"./{f_name}", mode="w", newline="\n") as f:
             # NOTE: write header of `.sh` file
@@ -425,7 +430,7 @@ class BashStage(BaseStage):
         """
         bash: str = param2template(dedent(self.bash), params)

-        logger.info(f"({run_id}) [STAGE]: Shell-Execute: {self.name}")
+        logger.info(f"({cut_id(run_id)}) [STAGE]: Shell-Execute: {self.name}")
         with self.create_sh_file(
             bash=bash, env=param2template(self.env, params), run_id=run_id
         ) as sh:
@@ -535,7 +540,7 @@ class PyStage(BaseStage):
         lc: DictData = {}

         # NOTE: Start exec the run statement.
-        logger.info(f"({run_id}) [STAGE]: Py-Execute: {self.name}")
+        logger.info(f"({cut_id(run_id)}) [STAGE]: Py-Execute: {self.name}")

         # WARNING: The exec build-in function is vary dangerous. So, it
         # should us the re module to validate exec-string before running.
@@ -660,7 +665,8 @@ class HookStage(BaseStage):
             args[k] = args.pop(k.removeprefix("_"))

         logger.info(
-            f"({run_id}) [STAGE]: Hook-Execute:
+            f"({cut_id(run_id)}) [STAGE]: Hook-Execute: "
+            f"{t_func.name}@{t_func.tag}"
         )
         rs: DictData = t_func(**param2template(args, params))

@@ -716,7 +722,9 @@ class TriggerStage(BaseStage):
         # NOTE: Set running workflow ID from running stage ID to external
         #   params on Loader object.
         wf: Workflow = Workflow.from_loader(name=_trigger)
-        logger.info(
+        logger.info(
+            f"({cut_id(run_id)}) [STAGE]: Trigger-Execute: {_trigger!r}"
+        )
         return wf.execute(
             params=param2template(self.params, params),
             run_id=run_id,
@@ -430,7 +430,7 @@ class Result:
         return self


-def make_exec(path: str | Path) -> None:
+def make_exec(path: str | Path) -> None:
     """Change mode of file to be executable file.

     :param path: A file path that want to make executable permission.
@@ -451,7 +451,9 @@ FILTERS: dict[str, callable] = {  # pragma: no cov


 class FilterFunc(Protocol):
-    """Tag Function Protocol
+    """Tag Function Protocol. This protocol that use to represent any callable
+    object that able to access the name attribute.
+    """

     name: str

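The expanded docstring describes `FilterFunc` as any callable that also exposes a `name` attribute. A brief sketch of how such a structural protocol is typically declared and consumed; only `name: str` is visible in this diff, so the `__call__` signature below is an assumption:

```python
from typing import Any, Protocol


class FilterFunc(Protocol):
    """Any callable object that also exposes a `name` attribute."""

    name: str

    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...


class UpperFilter:
    """Concrete example that structurally satisfies the protocol."""

    name = "upper"

    def __call__(self, value: str) -> str:
        return value.upper()


def describe(f: FilterFunc) -> str:
    return f"filter {f.name!r} -> {f('demo')}"


print(describe(UpperFilter()))  # filter 'upper' -> DEMO
```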
@@ -814,3 +816,17 @@ def batch(iterable: Iterator[Any], n: int) -> Iterator[Any]:

 def queue2str(queue: list[datetime]) -> Iterator[str]:  # pragma: no cov
     return (f"{q:%Y-%m-%d %H:%M:%S}" for q in queue)
+
+
+def cut_id(run_id: str, *, num: int = 6):
+    """Cutting running ID with length.
+
+    Example:
+        >>> cut_id(run_id='668931127320241228100331254567')
+        '254567'
+
+    :param run_id:
+    :param num:
+    :return:
+    """
+    return run_id[-num:]