ddeutil-workflow 0.0.27__py3-none-any.whl → 0.0.29__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -149,7 +149,7 @@ class WorkflowSchedule(BaseModel):
149
149
  @field_validator("on", mode="after")
150
150
  def __on_no_dup__(cls, value: list[On]) -> list[On]:
151
151
  """Validate the on fields should not contain duplicate values and if it
152
- contain every minute value, it should has only one on value.
152
+ contains every minute value, it should have only one on value.
153
153
 
154
154
  :rtype: list[On]
155
155
  """
@@ -195,8 +195,8 @@ class WorkflowSchedule(BaseModel):
195
195
  wf: Workflow = Workflow.from_loader(self.name, externals=extras)
196
196
  wf_queue: WorkflowQueue = queue[self.alias]
197
197
 
198
- # IMPORTANT: Create the default 'on' value if it does not passing
199
- # the on field to the Schedule object.
198
+ # IMPORTANT: Create the default 'on' value if it does not pass the `on`
199
+ # field to the Schedule object.
200
200
  ons: list[On] = self.on or wf.on.copy()
201
201
 
202
202
  for on in ons:
@@ -223,7 +223,7 @@ class WorkflowSchedule(BaseModel):
223
223
  class Schedule(BaseModel):
224
224
  """Schedule Pydantic model that use to run with any scheduler package.
225
225
 
226
- It does not equal the on value in Workflow model but it use same logic
226
+ It does not equal the on value in Workflow model, but it uses same logic
227
227
  to running release date with crontab interval.
228
228
  """
229
229
 
@@ -368,7 +368,7 @@ def schedule_task(
368
368
  :param stop: A stop datetime object that force stop running scheduler.
369
369
  :param queue: A mapping of alias name and WorkflowQueue object.
370
370
  :param threads: A mapping of alias name and Thread object.
371
- :param log: A log class that want to making log object.
371
+ :param log: A log class that wants to make a log object.
372
372
 
373
373
  :rtype: CancelJob | None
374
374
  """
@@ -449,7 +449,7 @@ def schedule_task(
449
449
 
450
450
 
451
451
  def monitor(threads: ReleaseThreads) -> None: # pragma: no cov
452
- """Monitoring function that running every five minute for track long running
452
+ """Monitoring function that runs every five minutes to track long-running
453
453
  thread instance from the schedule_control function that run every minute.
454
454
 
455
455
  :param threads: A mapping of Thread object and its name.
@@ -479,7 +479,7 @@ def schedule_control(
479
479
  """Scheduler control function that running every minute.
480
480
 
481
481
  :param schedules: A list of workflow names that want to schedule running.
482
- :param stop: An datetime value that use to stop running schedule.
482
+ :param stop: A datetime value that is used to stop the running schedule.
483
483
  :param externals: An external parameters that pass to Loader.
484
484
  :param log:
485
485
 
@@ -554,7 +554,7 @@ def schedule_control(
554
554
  scheduler.run_pending()
555
555
  time.sleep(1)
556
556
 
557
- # NOTE: Break the scheduler when the control job does not exists.
557
+ # NOTE: Break the scheduler when the control job does not exist.
558
558
  if not scheduler.get_jobs("control"):
559
559
  scheduler.clear("monitor")
560
560
 
@@ -585,7 +585,7 @@ def schedule_runner(
585
585
 
586
586
  :param stop: A stop datetime object that force stop running scheduler.
587
587
  :param externals:
588
- :param excluded: A list of schedule name that want to excluded from finding.
588
+ :param excluded: A list of schedule names that want to exclude from finding.
589
589
 
590
590
  :rtype: list[str]
591
591
 
ddeutil/workflow/stage.py CHANGED
@@ -5,12 +5,12 @@
5
5
  # ------------------------------------------------------------------------------
6
6
  """Stage Model that use for getting stage data template from the Job Model.
7
7
  The stage handle the minimize task that run in some thread (same thread at
8
- its job owner) that mean it is the lowest executor of a workflow workflow that
9
- can tracking logs.
8
+ its job owner) that mean it is the lowest executor of a workflow that can
9
+ tracking logs.
10
10
 
11
11
  The output of stage execution only return 0 status because I do not want to
12
12
  handle stage error on this stage model. I think stage model should have a lot of
13
- usecase and it does not worry when I want to create a new one.
13
+ use-case, and it does not worry when I want to create a new one.
14
14
 
15
15
  Execution --> Ok --> Result with 0
16
16
 
@@ -31,17 +31,11 @@ import time
31
31
  import uuid
32
32
  from abc import ABC, abstractmethod
33
33
  from collections.abc import Iterator
34
- from functools import wraps
35
34
  from inspect import Parameter
36
35
  from pathlib import Path
37
36
  from subprocess import CompletedProcess
38
37
  from textwrap import dedent
39
- from typing import Callable, Optional, Union
40
-
41
- try:
42
- from typing import ParamSpec
43
- except ImportError:
44
- from typing_extensions import ParamSpec
38
+ from typing import Optional, Union
45
39
 
46
40
  from pydantic import BaseModel, Field
47
41
  from pydantic.functional_validators import model_validator
@@ -59,9 +53,6 @@ from .utils import (
59
53
  make_exec,
60
54
  )
61
55
 
62
- P = ParamSpec("P")
63
- ReturnResult = Callable[P, Result]
64
- DecoratorResult = Callable[[ReturnResult], ReturnResult]
65
56
  logger = get_logger("ddeutil.workflow")
66
57
 
67
58
 
@@ -75,90 +66,6 @@ __all__: TupleStr = (
75
66
  )
76
67
 
77
68
 
78
- def handler_result(message: str | None = None) -> DecoratorResult:
79
- """Decorator function for handler result from the stage execution. This
80
- function should to use with execution method only.
81
-
82
- This stage exception handler still use ok-error concept but it allow
83
- you force catching an output result with error message by specific
84
- environment variable,`WORKFLOW_CORE_STAGE_RAISE_ERROR`.
85
-
86
- Execution --> Ok --> Result
87
- |-status: 0
88
- |-context:
89
- |-outputs: ...
90
-
91
- --> Error --> Result (if env var was set)
92
- |-status: 1
93
- |-context:
94
- |-error: ...
95
- |-error_message: ...
96
-
97
- --> Error --> Raise StageException(...)
98
-
99
- On the last step, it will set the running ID on a return result object
100
- from current stage ID before release the final result.
101
-
102
- :param message: A message that want to add at prefix of exception statement.
103
- :type message: str | None (Default=None)
104
- :rtype: Callable[P, Result]
105
- """
106
- # NOTE: The prefix message string that want to add on the first exception
107
- # message dialog.
108
- #
109
- # >>> ValueError: {message}
110
- # ... raise value error from the stage execution process.
111
- #
112
- message: str = message or ""
113
-
114
- def decorator(func: ReturnResult) -> ReturnResult:
115
-
116
- @wraps(func)
117
- def wrapped(self: Stage, *args, **kwargs):
118
-
119
- if not (run_id := kwargs.get("run_id")):
120
- run_id: str = gen_id(self.name + (self.id or ""), unique=True)
121
- kwargs["run_id"] = run_id
122
-
123
- rs_raise: Result = Result(status=1, run_id=run_id)
124
-
125
- try:
126
- # NOTE: Start calling origin function with a passing args.
127
- return func(self, *args, **kwargs)
128
- except Exception as err:
129
- # NOTE: Start catching error from the stage execution.
130
- logger.error(
131
- f"({cut_id(run_id)}) [STAGE]: {err.__class__.__name__}: "
132
- f"{err}"
133
- )
134
- if config.stage_raise_error:
135
- # NOTE: If error that raise from stage execution course by
136
- # itself, it will return that error with previous
137
- # dependency.
138
- if isinstance(err, StageException):
139
- raise StageException(
140
- f"{self.__class__.__name__}: {message}\n\t{err}"
141
- ) from err
142
- raise StageException(
143
- f"{self.__class__.__name__}: {message}\n\t"
144
- f"{err.__class__.__name__}: {err}"
145
- ) from None
146
-
147
- # NOTE: Catching exception error object to result with
148
- # error_message and error keys.
149
- return rs_raise.catch(
150
- status=1,
151
- context={
152
- "error": err,
153
- "error_message": f"{err.__class__.__name__}: {err}",
154
- },
155
- )
156
-
157
- return wrapped
158
-
159
- return decorator
160
-
161
-
162
69
  class BaseStage(BaseModel, ABC):
163
70
  """Base Stage Model that keep only id and name fields for the stage
164
71
  metadata. If you want to implement any custom stage, you can use this class
@@ -225,8 +132,74 @@ class BaseStage(BaseModel, ABC):
225
132
  """
226
133
  raise NotImplementedError("Stage should implement ``execute`` method.")
227
134
 
135
+ def handler_execute(
136
+ self, params: DictData, *, run_id: str | None = None
137
+ ) -> Result:
138
+ """Handler result from the stage execution.
139
+
140
+ This stage exception handler still use ok-error concept, but it
141
+ allows you force catching an output result with error message by
142
+ specific environment variable,`WORKFLOW_CORE_STAGE_RAISE_ERROR`.
143
+
144
+ Execution --> Ok --> Result
145
+ |-status: 0
146
+ |-context:
147
+ |-outputs: ...
148
+
149
+ --> Error --> Result (if env var was set)
150
+ |-status: 1
151
+ |-context:
152
+ |-error: ...
153
+ |-error_message: ...
154
+
155
+ --> Error --> Raise StageException(...)
156
+
157
+ On the last step, it will set the running ID on a return result object
158
+ from current stage ID before release the final result.
159
+
160
+ :param params: A parameter data that want to use in this execution.
161
+ :param run_id: A running stage ID for this execution.
162
+
163
+ :rtype: Result
164
+ """
165
+ if not run_id:
166
+ run_id: str = gen_id(self.name + (self.id or ""), unique=True)
167
+
168
+ rs_raise: Result = Result(status=1, run_id=run_id)
169
+ try:
170
+ # NOTE: Start calling origin function with a passing args.
171
+ return self.execute(params, run_id=run_id)
172
+ except Exception as err:
173
+ # NOTE: Start catching error from the stage execution.
174
+ logger.error(
175
+ f"({cut_id(run_id)}) [STAGE]: {err.__class__.__name__}: "
176
+ f"{err}"
177
+ )
178
+ if config.stage_raise_error:
179
+ # NOTE: If error that raise from stage execution course by
180
+ # itself, it will return that error with previous
181
+ # dependency.
182
+ if isinstance(err, StageException):
183
+ raise StageException(
184
+ f"{self.__class__.__name__}: \n\t{err}"
185
+ ) from err
186
+ raise StageException(
187
+ f"{self.__class__.__name__}: \n\t"
188
+ f"{err.__class__.__name__}: {err}"
189
+ ) from None
190
+
191
+ # NOTE: Catching exception error object to result with
192
+ # error_message and error keys.
193
+ return rs_raise.catch(
194
+ status=1,
195
+ context={
196
+ "error": err,
197
+ "error_message": f"{err.__class__.__name__}: {err}",
198
+ },
199
+ )
200
+
228
201
  def set_outputs(self, output: DictData, to: DictData) -> DictData:
229
- """Set an outputs from execution process to the receive context. The
202
+ """Set an outputs from execution process to the received context. The
230
203
  result from execution will pass to value of ``outputs`` key.
231
204
 
232
205
  For example of setting output method, If you receive execute output
@@ -243,7 +216,7 @@ class BaseStage(BaseModel, ABC):
243
216
  }
244
217
  }
245
218
 
246
- :param output: A output data that want to extract to an output key.
219
+ :param output: An output data that want to extract to an output key.
247
220
  :param to: A context data that want to add output result.
248
221
  :rtype: DictData
249
222
  """
@@ -288,8 +261,9 @@ class BaseStage(BaseModel, ABC):
288
261
  params: DictData = {} if params is None else params
289
262
 
290
263
  try:
291
- # WARNING: The eval build-in function is vary dangerous. So, it
292
- # should us the re module to validate eval-string before running.
264
+ # WARNING: The eval build-in function is very dangerous. So, it
265
+ # should use the `re` module to validate eval-string before
266
+ # running.
293
267
  rs: bool = eval(
294
268
  param2template(self.condition, params), globals() | params, {}
295
269
  )
@@ -321,7 +295,6 @@ class EmptyStage(BaseStage):
321
295
  ge=0,
322
296
  )
323
297
 
324
- @handler_result()
325
298
  def execute(self, params: DictData, *, run_id: str | None = None) -> Result:
326
299
  """Execution method for the Empty stage that do only logging out to
327
300
  stdout. This method does not use the `handler_result` decorator because
@@ -352,7 +325,7 @@ class EmptyStage(BaseStage):
352
325
 
353
326
  class BashStage(BaseStage):
354
327
  """Bash execution stage that execute bash script on the current OS.
355
- That mean if your current OS is Windows, it will running bash in the WSL.
328
+ If your current OS is Windows, it will run on the bash in the WSL.
356
329
 
357
330
  I get some limitation when I run shell statement with the built-in
358
331
  subprocess package. It is not good enough to use a multiline statement.
@@ -418,7 +391,6 @@ class BashStage(BaseStage):
418
391
  # Note: Remove .sh file that use to run bash.
419
392
  Path(f"./{f_name}").unlink()
420
393
 
421
- @handler_result()
422
394
  def execute(self, params: DictData, *, run_id: str | None = None) -> Result:
423
395
  """Execute the Bash statement with the Python build-in ``subprocess``
424
396
  package.
@@ -499,7 +471,7 @@ class PyStage(BaseStage):
499
471
  """Override set an outputs method for the Python execution process that
500
472
  extract output from all the locals values.
501
473
 
502
- :param output: A output data that want to extract to an output key.
474
+ :param output: An output data that want to extract to an output key.
503
475
  :param to: A context data that want to add output result.
504
476
 
505
477
  :rtype: DictData
@@ -520,7 +492,6 @@ class PyStage(BaseStage):
520
492
  to.update({k: gb[k] for k in to if k in gb})
521
493
  return to
522
494
 
523
- @handler_result()
524
495
  def execute(self, params: DictData, *, run_id: str | None = None) -> Result:
525
496
  """Execute the Python statement that pass all globals and input params
526
497
  to globals argument on ``exec`` build-in function.
@@ -542,8 +513,8 @@ class PyStage(BaseStage):
542
513
  # NOTE: Start exec the run statement.
543
514
  logger.info(f"({cut_id(run_id)}) [STAGE]: Py-Execute: {self.name}")
544
515
 
545
- # WARNING: The exec build-in function is vary dangerous. So, it
546
- # should us the re module to validate exec-string before running.
516
+ # WARNING: The exec build-in function is very dangerous. So, it
517
+ # should use the re module to validate exec-string before running.
547
518
  exec(run, _globals, lc)
548
519
 
549
520
  return Result(
@@ -560,7 +531,7 @@ class HookStage(BaseStage):
560
531
  This stage is different with PyStage because the PyStage is just calling
561
532
  a Python statement with the ``eval`` and pass that locale before eval that
562
533
  statement. So, you can create your function complexly that you can for your
563
- propose to invoked by this stage object.
534
+ objective to be invoked by this stage object.
564
535
 
565
536
  Data Validate:
566
537
  >>> stage = {
@@ -581,7 +552,6 @@ class HookStage(BaseStage):
581
552
  alias="with",
582
553
  )
583
554
 
584
- @handler_result()
585
555
  def execute(self, params: DictData, *, run_id: str | None = None) -> Result:
586
556
  """Execute the Hook function that already in the hook registry.
587
557
 
@@ -612,7 +582,7 @@ class HookStage(BaseStage):
612
582
  f"Necessary params, ({', '.join(ips.parameters.keys())}, ), "
613
583
  f"does not set to args"
614
584
  )
615
- # NOTE: add '_' prefix if it want to use.
585
+ # NOTE: add '_' prefix if it wants to use.
616
586
  for k in ips.parameters:
617
587
  if k.removeprefix("_") in args:
618
588
  args[k] = args.pop(k.removeprefix("_"))
@@ -634,7 +604,7 @@ class HookStage(BaseStage):
634
604
 
635
605
 
636
606
  class TriggerStage(BaseStage):
637
- """Trigger Workflow execution stage that execute another workflow. This this
607
+ """Trigger Workflow execution stage that execute another workflow. This
638
608
  the core stage that allow you to create the reusable workflow object or
639
609
  dynamic parameters workflow for common usecase.
640
610
 
@@ -656,9 +626,8 @@ class TriggerStage(BaseStage):
656
626
  description="A parameter that want to pass to workflow execution.",
657
627
  )
658
628
 
659
- @handler_result("Raise from TriggerStage")
660
629
  def execute(self, params: DictData, *, run_id: str | None = None) -> Result:
661
- """Trigger another workflow execution. It will waiting the trigger
630
+ """Trigger another workflow execution. It will wait for the trigger
662
631
  workflow running complete before catching its result.
663
632
 
664
633
  :param params: A parameter data that want to use in this execution.
@@ -687,7 +656,7 @@ class TriggerStage(BaseStage):
687
656
  # NOTE:
688
657
  # An order of parsing stage model on the Job model with ``stages`` field.
689
658
  # From the current build-in stages, they do not have stage that have the same
690
- # fields that be cause of parsing on the Job's stages key.
659
+ # fields that because of parsing on the Job's stages key.
691
660
  #
692
661
  Stage = Union[
693
662
  PyStage,
@@ -44,10 +44,10 @@ FILTERS: dict[str, callable] = { # pragma: no cov
44
44
 
45
45
  class FilterFunc(Protocol):
46
46
  """Tag Function Protocol. This protocol that use to represent any callable
47
- object that able to access the name attribute.
47
+ object that able to access the filter attribute.
48
48
  """
49
49
 
50
- name: str
50
+ filter: str
51
51
 
52
52
  def __call__(self, *args, **kwargs): ... # pragma: no cov
53
53
 
@@ -95,6 +95,8 @@ def make_filter_registry() -> dict[str, FilterRegistry]:
95
95
  if not hasattr(func, "filter"):
96
96
  continue
97
97
 
98
+ func: FilterFunc
99
+
98
100
  rs[func.filter] = import_string(f"{module}.{fstr}")
99
101
 
100
102
  rs.update(FILTERS)
@@ -174,7 +176,7 @@ def map_post_filter(
174
176
  """Mapping post-filter to value with sequence list of filter function name
175
177
  that will get from the filter registry.
176
178
 
177
- :param value: A string value that want to mapped with filter function.
179
+ :param value: A string value that want to map with filter function.
178
180
  :param post_filter: A list of post-filter function name.
179
181
  :param filters: A filter registry.
180
182
 
@@ -204,7 +206,7 @@ def not_in_template(value: Any, *, not_in: str = "matrix.") -> bool:
204
206
  """Check value should not pass template with not_in value prefix.
205
207
 
206
208
  :param value: A value that want to find parameter template prefix.
207
- :param not_in: The not in string that use in the `.startswith` function.
209
+ :param not_in: The not-in string that use in the `.startswith` function.
208
210
 
209
211
  :rtype: bool
210
212
  """
@@ -249,7 +251,7 @@ def str2template(
249
251
  with the workflow parameter types that is `str`, `int`, `datetime`, and
250
252
  `list`.
251
253
 
252
- :param value: A string value that want to mapped with an params
254
+ :param value: A string value that want to map with params
253
255
  :param params: A parameter value that getting with matched regular
254
256
  expression.
255
257
  :param filters:
@@ -273,11 +275,11 @@ def str2template(
273
275
  if not hasdot(caller, params):
274
276
  raise UtilException(f"The params does not set caller: {caller!r}.")
275
277
 
276
- # NOTE: from validate step, it guarantee that caller exists in params.
278
+ # NOTE: from validate step, it guarantees that caller exists in params.
277
279
  getter: Any = getdot(caller, params)
278
280
 
279
281
  # NOTE:
280
- # If type of getter caller is not string type and it does not use to
282
+ # If type of getter caller is not string type, and it does not use to
281
283
  # concat other string value, it will return origin value from the
282
284
  # ``getdot`` function.
283
285
  if value.replace(found.full, "", 1) == "":
@@ -293,14 +295,11 @@ def str2template(
293
295
  return search_env_replace(value)
294
296
 
295
297
 
296
- def param2template(
297
- value: Any,
298
- params: DictData,
299
- ) -> Any:
298
+ def param2template(value: Any, params: DictData) -> Any:
300
299
  """Pass param to template string that can search by ``RE_CALLER`` regular
301
300
  expression.
302
301
 
303
- :param value: A value that want to mapped with an params
302
+ :param value: A value that want to map with params
304
303
  :param params: A parameter value that getting with matched regular
305
304
  expression.
306
305
 
ddeutil/workflow/utils.py CHANGED
@@ -18,18 +18,12 @@ from random import randrange
18
18
  from typing import Any, TypeVar
19
19
  from zoneinfo import ZoneInfo
20
20
 
21
- try:
22
- from typing import ParamSpec
23
- except ImportError:
24
- from typing_extensions import ParamSpec
25
-
26
21
  from ddeutil.core import hash_str
27
22
 
28
23
  from .__types import DictData, Matrix
29
24
  from .conf import config
30
25
 
31
26
  T = TypeVar("T")
32
- P = ParamSpec("P")
33
27
 
34
28
  logger = logging.getLogger("ddeutil.workflow")
35
29
 
@@ -86,7 +80,7 @@ def gen_id(
86
80
  sensitive: bool = True,
87
81
  unique: bool = False,
88
82
  ) -> str:
89
- """Generate running ID for able to tracking. This generate process use `md5`
83
+ """Generate a running ID for tracking. This generation process uses the `md5`
90
84
  algorithm function if ``WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE`` set to
91
85
  false. But it will cut this hashing value length to 10 if the setting value
92
86
  set to true.
@@ -95,6 +89,7 @@ def gen_id(
95
89
  :param sensitive: A flag that convert the value to lower case before hashing
96
90
  :param unique: A flag that add timestamp at microsecond level to value
97
91
  before hashing.
92
+
98
93
  :rtype: str
99
94
  """
100
95
  if not isinstance(value, str):
@@ -121,13 +116,13 @@ def make_exec(path: str | Path) -> None:
121
116
  f.chmod(f.stat().st_mode | stat.S_IEXEC)
122
117
 
123
118
 
124
- def filter_func(value: Any) -> Any:
119
+ def filter_func(value: T) -> T:
125
120
  """Filter out an own created function of any value of mapping context by
126
121
  replacing it to its function name. If it is built-in function, it does not
127
122
  have any changing.
128
123
 
129
124
  :param value: A value context data that want to filter out function value.
130
- :type: The same type of an input ``value``.
125
+ :type: The same type of input ``value``.
131
126
  """
132
127
  if isinstance(value, dict):
133
128
  return {k: filter_func(value[k]) for k in value}
@@ -135,8 +130,8 @@ def filter_func(value: Any) -> Any:
135
130
  return type(value)([filter_func(i) for i in value])
136
131
 
137
132
  if isfunction(value):
138
- # NOTE: If it want to improve to get this function, it able to save to
139
- # some global memory storage.
133
+ # NOTE: If it wants to improve to get this function, it is able to save
134
+ # to some global memory storage.
140
135
  # ---
141
136
  # >>> GLOBAL_DICT[value.__name__] = value
142
137
  #
@@ -152,6 +147,10 @@ def dash2underscore(
152
147
  ) -> DictData:
153
148
  """Change key name that has dash to underscore.
154
149
 
150
+ :param key
151
+ :param values
152
+ :param fixed
153
+
155
154
  :rtype: DictData
156
155
  """
157
156
  if key in values:
@@ -162,6 +161,8 @@ def dash2underscore(
162
161
  def cross_product(matrix: Matrix) -> Iterator[DictData]:
163
162
  """Iterator of products value from matrix.
164
163
 
164
+ :param matrix:
165
+
165
166
  :rtype: Iterator[DictData]
166
167
  """
167
168
  yield from (
@@ -181,6 +182,11 @@ def batch(iterable: Iterator[Any], n: int) -> Iterator[Any]:
181
182
  ['A', 'B', 'C']
182
183
  ['D', 'E', 'F']
183
184
  ['G']
185
+
186
+ :param iterable:
187
+ :param n:
188
+
189
+ :rtype: Iterator[Any]
184
190
  """
185
191
  if n < 1:
186
192
  raise ValueError("n must be at least one")
@@ -195,7 +201,7 @@ def batch(iterable: Iterator[Any], n: int) -> Iterator[Any]:
195
201
  yield chain((first_el,), chunk_it)
196
202
 
197
203
 
198
- def cut_id(run_id: str, *, num: int = 6):
204
+ def cut_id(run_id: str, *, num: int = 6) -> str:
199
205
  """Cutting running ID with length.
200
206
 
201
207
  Example:
@@ -204,6 +210,7 @@ def cut_id(run_id: str, *, num: int = 6):
204
210
 
205
211
  :param run_id:
206
212
  :param num:
207
- :return:
213
+
214
+ :rtype: str
208
215
  """
209
216
  return run_id[-num:]