ddeutil-workflow 0.0.29__py3-none-any.whl → 0.0.31__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__cron.py +1 -0
- ddeutil/workflow/__init__.py +5 -1
- ddeutil/workflow/api/api.py +2 -2
- ddeutil/workflow/conf.py +4 -0
- ddeutil/workflow/cron.py +77 -21
- ddeutil/workflow/exceptions.py +3 -0
- ddeutil/workflow/hook.py +3 -1
- ddeutil/workflow/params.py +18 -1
- ddeutil/workflow/result.py +1 -0
- ddeutil/workflow/scheduler.py +101 -67
- ddeutil/workflow/stage.py +13 -1
- ddeutil/workflow/utils.py +36 -10
- ddeutil/workflow/workflow.py +179 -141
- {ddeutil_workflow-0.0.29.dist-info → ddeutil_workflow-0.0.31.dist-info}/METADATA +17 -12
- ddeutil_workflow-0.0.31.dist-info/RECORD +25 -0
- ddeutil_workflow-0.0.29.dist-info/RECORD +0 -25
- {ddeutil_workflow-0.0.29.dist-info → ddeutil_workflow-0.0.31.dist-info}/LICENSE +0 -0
- {ddeutil_workflow-0.0.29.dist-info → ddeutil_workflow-0.0.31.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.29.dist-info → ddeutil_workflow-0.0.31.dist-info}/top_level.txt +0 -0
ddeutil/workflow/scheduler.py
CHANGED
@@ -33,7 +33,7 @@ from functools import wraps
 from heapq import heappop, heappush
 from textwrap import dedent
 from threading import Thread
-from typing import Callable, Optional, TypedDict
+from typing import Callable, Optional, TypedDict, Union
 
 from pydantic import BaseModel, Field
 from pydantic.functional_validators import field_validator, model_validator
@@ -41,7 +41,7 @@ from typing_extensions import Self
 
 try:
     from typing import ParamSpec
-except ImportError:
+except ImportError:  # pragma: no cov
     from typing_extensions import ParamSpec
 
 try:
@@ -53,12 +53,10 @@ from .__cron import CronRunner
 from .__types import DictData, TupleStr
 from .conf import Loader, Log, config, get_log, get_logger
 from .cron import On
-from .exceptions import WorkflowException
-from .
-
-
-)
-from .workflow import Workflow, WorkflowQueue, WorkflowRelease, WorkflowTask
+from .exceptions import ScheduleException, WorkflowException
+from .result import Result
+from .utils import batch, delay
+from .workflow import Release, ReleaseQueue, Workflow, WorkflowTask
 
 P = ParamSpec("P")
 logger = get_logger("ddeutil.workflow")
@@ -69,7 +67,7 @@ logging.getLogger("schedule").setLevel(logging.INFO)
 
 __all__: TupleStr = (
     "Schedule",
-    "
+    "ScheduleWorkflow",
     "schedule_task",
     "monitor",
    "schedule_control",
@@ -79,8 +77,8 @@ __all__: TupleStr = (
 )
 
 
-class WorkflowSchedule(BaseModel):
-    """Workflow
+class ScheduleWorkflow(BaseModel):
+    """Schedule Workflow Pydantic model that use to keep workflow model for
     the Schedule model. it should not use Workflow model directly because on the
     schedule config it can adjust crontab value that different from the Workflow
     model.
@@ -170,7 +168,7 @@ class WorkflowSchedule(BaseModel):
     def tasks(
         self,
         start_date: datetime,
-        queue: dict[str,
+        queue: dict[str, ReleaseQueue],
         *,
         externals: DictData | None = None,
     ) -> list[WorkflowTask]:
@@ -193,7 +191,7 @@ class WorkflowSchedule(BaseModel):
 
         # NOTE: Loading workflow model from the name of workflow.
         wf: Workflow = Workflow.from_loader(self.name, externals=extras)
-        wf_queue:
+        wf_queue: ReleaseQueue = queue[self.alias]
 
         # IMPORTANT: Create the default 'on' value if it does not pass the `on`
         # field to the Schedule object.
@@ -233,9 +231,9 @@ class Schedule(BaseModel):
             "A schedule description that can be string of markdown content."
         ),
     )
-    workflows: list[
+    workflows: list[ScheduleWorkflow] = Field(
         default_factory=list,
-        description="A list of
+        description="A list of ScheduleWorkflow models.",
     )
 
     @field_validator("desc", mode="after")
@@ -258,7 +256,7 @@ class Schedule(BaseModel):
         an input schedule name. The loader object will use this schedule name to
         searching configuration data of this schedule model in conf path.
 
-        :param name: A schedule name that want to pass to Loader object.
+        :param name: (str) A schedule name that want to pass to Loader object.
         :param externals: An external parameters that want to pass to Loader
             object.
 
@@ -277,10 +275,41 @@ class Schedule(BaseModel):
 
         return cls.model_validate(obj=loader_data)
 
+    @classmethod
+    def extract_tasks(
+        cls,
+        schedules: list[str],
+        start_date: datetime,
+        queue: dict[str, ReleaseQueue],
+        externals: DictData | None = None,
+    ) -> list[WorkflowTask]:
+        """Return the list of WorkflowTask object from all schedule object that
+        include in an input schedules argument.
+
+        :param schedules: A list of schedule name that will use `from_loader`
+            method.
+        :param start_date: A start date that get from the workflow schedule.
+        :param queue: A mapping of name and list of datetime for queue.
+        :param externals: An external parameters that pass to the Loader object.
+
+        :rtype: list[WorkflowTask]
+        """
+        tasks: list[WorkflowTask] = []
+        for name in schedules:
+            schedule: Schedule = Schedule.from_loader(name, externals=externals)
+            tasks.extend(
+                schedule.tasks(
+                    start_date,
+                    queue=queue,
+                    externals=externals,
+                ),
+            )
+        return tasks
+
     def tasks(
         self,
         start_date: datetime,
-        queue: dict[str,
+        queue: dict[str, ReleaseQueue],
         *,
         externals: DictData | None = None,
     ) -> list[WorkflowTask]:
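
Note: the new `Schedule.extract_tasks` classmethod factors out the loading loop that `schedule_control` previously inlined (see the removed lines in the `@@ -493,38 +532,31 @@` hunk further down). A minimal usage sketch, assuming the package is installed and a schedule config with the hypothetical name "schedule-pipeline" exists in the conf path:

    from datetime import datetime

    from ddeutil.workflow.scheduler import Schedule
    from ddeutil.workflow.workflow import ReleaseQueue, WorkflowTask

    # Shared mapping of workflow alias -> ReleaseQueue; Schedule.tasks fills it
    # in as a side effect while the task list is being built.
    queue: dict[str, ReleaseQueue] = {}
    tasks: list[WorkflowTask] = Schedule.extract_tasks(
        ["schedule-pipeline"],  # hypothetical schedule name from the conf path
        start_date=datetime.now(),
        queue=queue,
    )
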
@@ -289,7 +318,7 @@ class Schedule(BaseModel):
 
         :param start_date: A start date that get from the workflow schedule.
         :param queue: A mapping of name and list of datetime for queue.
-        :type queue: dict[str,
+        :type queue: dict[str, ReleaseQueue]
         :param externals: An external parameters that pass to the Loader object.
         :type externals: DictData | None
 
@@ -302,7 +331,7 @@ class Schedule(BaseModel):
         for workflow in self.workflows:
 
             if workflow.alias not in queue:
-                queue[workflow.alias] =
+                queue[workflow.alias] = ReleaseQueue()
 
             workflow_tasks.extend(
                 workflow.tasks(start_date, queue=queue, externals=externals)
@@ -311,7 +340,8 @@ class Schedule(BaseModel):
         return workflow_tasks
 
 
-
+ResultOrCancelJob = Union[type[CancelJob], Result]
+ReturnCancelJob = Callable[P, ResultOrCancelJob]
 DecoratorCancelJob = Callable[[ReturnCancelJob], ReturnCancelJob]
 
 
@@ -326,24 +356,25 @@ def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:
     """
 
     def decorator(func: ReturnCancelJob) -> ReturnCancelJob:  # pragma: no cov
-        try:
 
-
-
+        @wraps(func)
+        def wrapper(*args: P.args, **kwargs: P.kwargs) -> ResultOrCancelJob:
+            try:
                 return func(*args, **kwargs)
+            except Exception as err:
+                logger.exception(err)
+                if cancel_on_failure:
+                    return CancelJob
+                raise err
 
-
-
-        except Exception as err:
-            logger.exception(err)
-            if cancel_on_failure:
-                return CancelJob
-            raise err
+        return wrapper
 
     return decorator
 
 
 class ReleaseThread(TypedDict):
+    """TypeDict for the release thread."""
+
     thread: Thread
     start_date: datetime
 
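
Note: the old `catch_exceptions` placed its try/except around the decorator's own body rather than around the call, so exceptions raised when the scheduled job actually ran were not caught there; the rewrite builds a real `wrapper` via `wraps`, and returning `CancelJob` from that wrapper is what tells the `schedule` library to drop the failing job. A standalone sketch of the same pattern (assumes the optional `schedule` package is installed; names here are illustrative, not the package API):

    import functools
    import logging
    from typing import Any, Callable

    from schedule import CancelJob  # sentinel understood by schedule's run_pending()

    logger = logging.getLogger(__name__)

    def catch_exceptions_sketch(cancel_on_failure: bool = False):
        def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
            @functools.wraps(func)
            def wrapper(*args: Any, **kwargs: Any) -> Any:
                try:
                    return func(*args, **kwargs)
                except Exception as err:
                    logger.exception(err)
                    if cancel_on_failure:
                        # Returning CancelJob makes the scheduler cancel this job.
                        return CancelJob
                    raise

            return wrapper

        return decorator
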
@@ -355,22 +386,24 @@ ReleaseThreads = dict[str, ReleaseThread]
 def schedule_task(
     tasks: list[WorkflowTask],
     stop: datetime,
-    queue: dict[str,
+    queue: dict[str, ReleaseQueue],
     threads: ReleaseThreads,
     log: type[Log],
-) -> CancelJob | None:
-    """
-
+) -> type[CancelJob] | None:
+    """Schedule task function that generate thread of workflow task release
+    method in background. This function do the same logic as the workflow poke
+    method, but it runs with map of schedules and the on values.
 
-    This
+    This schedule task start runs every minute at ':02' second and it does
+    not allow you to run with offset time.
 
     :param tasks: A list of WorkflowTask object.
     :param stop: A stop datetime object that force stop running scheduler.
-    :param queue: A mapping of alias name and
+    :param queue: A mapping of alias name and ReleaseQueue object.
     :param threads: A mapping of alias name and Thread object.
     :param log: A log class that want to make log object.
 
-    :rtype: CancelJob | None
+    :rtype: type[CancelJob] | None
     """
     current_date: datetime = datetime.now(tz=config.tz)
     if current_date > stop.replace(tzinfo=config.tz):
@@ -390,7 +423,7 @@ def schedule_task(
     #
     for task in tasks:
 
-        q:
+        q: ReleaseQueue = queue[task.alias]
 
         # NOTE: Start adding queue and move the runner date in the WorkflowTask.
         task.queue(stop, q, log=log)
@@ -410,15 +443,20 @@ def schedule_task(
         current_release: datetime = current_date.replace(
             second=0, microsecond=0
         )
-        if (first_date := q.first_queue.date)
+        if (first_date := q.first_queue.date) > current_release:
             logger.debug(
                 f"[WORKFLOW]: Skip schedule "
                 f"{first_date:%Y-%m-%d %H:%M:%S} for : {task.alias!r}"
             )
             continue
+        elif first_date < current_release:  # pragma: no cov
+            raise ScheduleException(
+                "The first release date from queue should not less than current"
+                "release date."
+            )
 
         # NOTE: Pop the latest release and push it to running.
-        release:
+        release: Release = heappop(q.queue)
         heappush(q.running, release)
 
         logger.info(
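
Note: the gate above compares the earliest queued release against the current minute bucket: a future release is skipped, an equal one is popped from the waiting heap and pushed onto the running heap, and a past one now raises `ScheduleException`. A stdlib-only sketch of that heap discipline (an illustrative stand-in for `ReleaseQueue`, not the package API):

    import heapq
    from datetime import datetime, timedelta

    waiting: list[datetime] = []   # pending releases, earliest first
    running: list[datetime] = []   # releases handed to worker threads

    minute = datetime.now().replace(second=0, microsecond=0)
    heapq.heappush(waiting, minute)
    heapq.heappush(waiting, minute + timedelta(minutes=1))

    current_release = minute
    first_date = waiting[0]                      # peek without popping
    if first_date > current_release:
        print("skip: earliest release is still in the future")
    elif first_date < current_release:
        raise RuntimeError("queue fell behind the current minute")  # ScheduleException upstream
    else:
        release = heapq.heappop(waiting)         # pop the due release
        heapq.heappush(running, release)         # track it as running
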
@@ -445,7 +483,7 @@ def schedule_task(
 
     delay()
 
-    logger.debug(f"[SCHEDULE]: End schedule
+    logger.debug(f"[SCHEDULE]: End schedule task {'=' * 80}")
 
 
 def monitor(threads: ReleaseThreads) -> None:  # pragma: no cov
@@ -455,9 +493,7 @@ def monitor(threads: ReleaseThreads) -> None:  # pragma: no cov
     :param threads: A mapping of Thread object and its name.
     :type threads: ReleaseThreads
     """
-    logger.debug(
-        "[MONITOR]: Start checking long running workflow release task."
-    )
+    logger.debug("[MONITOR]: Start checking long running schedule task.")
 
     snapshot_threads: list[str] = list(threads.keys())
     for t_name in snapshot_threads:
@@ -476,12 +512,15 @@ def schedule_control(
     *,
     log: type[Log] | None = None,
 ) -> list[str]:  # pragma: no cov
-    """Scheduler control function that
+    """Scheduler control function that run the chuck of schedules every minute
+    and this function release monitoring thread for tracking undead thread in
+    the background.
 
     :param schedules: A list of workflow names that want to schedule running.
     :param stop: A datetime value that use to stop running schedule.
     :param externals: An external parameters that pass to Loader.
-    :param log:
+    :param log: A log class that use on the workflow task release for writing
+        its release log context.
 
     :rtype: list[str]
     """
@@ -493,38 +532,31 @@ def schedule_control(
             "Should install schedule package before use this module."
         ) from None
 
+    # NOTE: Get default logging.
     log: type[Log] = log or get_log()
     scheduler: Scheduler = Scheduler()
+
+    # NOTE: Create the start and stop datetime.
     start_date: datetime = datetime.now(tz=config.tz)
     stop_date: datetime = stop or (start_date + config.stop_boundary_delta)
 
     # IMPORTANT: Create main mapping of queue and thread object.
-    queue: dict[str,
+    queue: dict[str, ReleaseQueue] = {}
     threads: ReleaseThreads = {}
 
     start_date_waiting: datetime = start_date.replace(
         second=0, microsecond=0
     ) + timedelta(minutes=1)
 
-    # NOTE: Start create workflow tasks from list of schedule name.
-    tasks: list[WorkflowTask] = []
-    for name in schedules:
-        schedule: Schedule = Schedule.from_loader(name, externals=externals)
-        tasks.extend(
-            schedule.tasks(
-                start_date_waiting,
-                queue=queue,
-                externals=externals,
-            ),
-        )
-
     # NOTE: This schedule job will start every minute at :02 seconds.
     (
         scheduler.every(1)
         .minutes.at(":02")
         .do(
             schedule_task,
-            tasks=
+            tasks=Schedule.extract_tasks(
+                schedules, start_date_waiting, queue, externals=externals
+            ),
             stop=stop_date,
             queue=queue,
             threads=threads,
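
Note: the registration above uses the `schedule` library's fluent API: `every(1).minutes.at(":02")` fires once per minute at second :02, `.do()` binds `schedule_task` with its keyword arguments, and `.tag("control")` labels the job so it can later be handled as a group. A minimal sketch of the same pattern (assumes the optional `schedule` package; the `tick` job is illustrative):

    from schedule import Scheduler

    scheduler = Scheduler()

    def tick() -> None:
        print("run the per-minute control work here")

    # Same shape as the control job above: every minute at second :02, tagged.
    scheduler.every(1).minutes.at(":02").do(tick).tag("control")

    # A driver loop then polls for due jobs, for example:
    # while True:
    #     scheduler.run_pending()
    #     time.sleep(1)
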
@@ -533,7 +565,8 @@ def schedule_control(
         .tag("control")
     )
 
-    # NOTE: Checking zombie task with schedule job will start every 5 minute
+    # NOTE: Checking zombie task with schedule job will start every 5 minute at
+    # :10 seconds.
     (
         scheduler.every(5)
         .minutes.at(":10")
@@ -579,16 +612,15 @@ def schedule_runner(
     externals: DictData | None = None,
     excluded: list[str] | None = None,
 ) -> list[str]:  # pragma: no cov
-    """Schedule runner function
-
-
+    """Schedule runner function it the multiprocess controller function for
+    split the setting schedule to the `schedule_control` function on the
+    process pool. It chunks schedule configs that exists in config
+    path by `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` value.
 
     :param stop: A stop datetime object that force stop running scheduler.
     :param externals:
     :param excluded: A list of schedule name that want to exclude from finding.
 
-    :rtype: list[str]
-
     This function will get all workflows that include on value that was
     created in config path and chuck it with application config variable
     ``WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS`` env var to multiprocess executor
@@ -600,7 +632,9 @@ def schedule_runner(
         --> thread of release task 01 02
         ==> schedule --> thread of release task 02 01
         --> thread of release task 02 02
-        ==> process 02
+        ==> process 02 ==> ...
+
+    :rtype: list[str]
     """
     results: list[str] = []
 
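
Note: the docstring above describes splitting the discovered schedule configs into chunks of `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` and handing each chunk to `schedule_control` on a process pool. A stdlib-only sketch of that fan-out shape (the schedule names and the `run_chunk` stand-in are hypothetical, not the package's API):

    from concurrent.futures import ProcessPoolExecutor, as_completed
    from itertools import islice
    from typing import Iterator


    def chunk(names: list[str], n: int) -> Iterator[list[str]]:
        # Same idea as utils.batch: lists of at most n names, the last may be shorter.
        it = iter(names)
        while part := list(islice(it, n)):
            yield part


    def run_chunk(names: list[str]) -> list[str]:
        # Stand-in for schedule_control(names, stop=..., externals=...) in a child process.
        return names


    if __name__ == "__main__":
        schedules = ["schedule-a", "schedule-b", "schedule-c"]  # hypothetical names
        results: list[str] = []
        with ProcessPoolExecutor(max_workers=2) as pool:
            futures = [pool.submit(run_chunk, part) for part in chunk(schedules, 2)]
            for future in as_completed(futures):
                results.extend(future.result())
        print(results)
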
ddeutil/workflow/stage.py
CHANGED
@@ -328,7 +328,7 @@ class BashStage(BaseStage):
     If your current OS is Windows, it will run on the bash in the WSL.
 
     I get some limitation when I run shell statement with the built-in
-
+    subprocess package. It does not good enough to use multiline statement.
     Thus, I add writing ``.sh`` file before execution process for fix this
     issue.
 
@@ -665,3 +665,15 @@ Stage = Union[
     TriggerStage,
     EmptyStage,
 ]
+
+
+# TODO: Not implement this stages yet
+class ParallelStage(BaseModel):
+    parallel: list[Stage]
+    max_parallel_core: int = Field(default=2)
+
+
+# TODO: Not implement this stages yet
+class ForEachStage(BaseModel):
+    foreach: list[str]
+    stages: list[Stage]
ddeutil/workflow/utils.py
CHANGED
@@ -21,10 +21,9 @@ from zoneinfo import ZoneInfo
 from ddeutil.core import hash_str
 
 from .__types import DictData, Matrix
-from .conf import config
 
 T = TypeVar("T")
-
+UTC = ZoneInfo("UTC")
 logger = logging.getLogger("ddeutil.workflow")
 
 
@@ -37,7 +36,7 @@ def get_dt_now(
     :param offset:
     :return: The current datetime object that use an input timezone or UTC.
     """
-    return datetime.now(tz=(tz or
+    return datetime.now(tz=(tz or UTC)) - timedelta(seconds=offset)
 
 
 def get_diff_sec(
@@ -52,17 +51,42 @@ def get_diff_sec(
     """
     return round(
         (
-            dt
-            - datetime.now(tz=(tz or ZoneInfo("UTC")))
-            - timedelta(seconds=offset)
+            dt - datetime.now(tz=(tz or UTC)) - timedelta(seconds=offset)
         ).total_seconds()
     )
 
 
-def
+def reach_next_minute(
+    dt: datetime, tz: ZoneInfo | None = None, offset: float = 0.0
+) -> bool:
+    """Check this datetime object is not in range of minute level on the current
+    datetime.
+    """
+    diff: float = (
+        dt.replace(second=0, microsecond=0)
+        - (
+            get_dt_now(tz=(tz or UTC), offset=offset).replace(
+                second=0, microsecond=0
+            )
+        )
+    ).total_seconds()
+    if diff >= 60:
+        return True
+    elif diff >= 0:
+        return False
+
+    raise ValueError(
+        "Check reach the next minute function should check a datetime that not "
+        "less than the current date"
+    )
+
+
+def wait_to_next_minute(
+    dt: datetime, second: float = 0
+) -> None:  # pragma: no cov
     """Wait with sleep to the next minute with an offset second value."""
-    future =
-    time.sleep((future -
+    future = dt.replace(second=0, microsecond=0) + timedelta(minutes=1)
+    time.sleep((future - dt).total_seconds() + second)
 
 
 def delay(second: float = 0) -> None:  # pragma: no cov
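
Note: `reach_next_minute` truncates both datetimes to the minute and reports whether the given datetime has already rolled into a later minute bucket than now, raising if it lies in an earlier minute; `wait_to_next_minute` sleeps until the next minute boundary plus an optional second offset. A small stdlib sketch of the minute-bucket arithmetic (not the package API itself):

    from datetime import datetime, timedelta
    from zoneinfo import ZoneInfo

    UTC = ZoneInfo("UTC")

    now_minute = datetime.now(tz=UTC).replace(second=0, microsecond=0)
    release = now_minute + timedelta(minutes=1)

    # Both sides are truncated to the minute before comparing, so a release in
    # the next minute yields a difference of at least 60 seconds.
    diff = (release - now_minute).total_seconds()
    print(diff >= 60)   # True -> the caller should wait for the next minute

    # wait_to_next_minute(dt, second=s) then sleeps roughly (60 - dt.second) + s seconds.
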
@@ -92,6 +116,8 @@ def gen_id(
 
     :rtype: str
     """
+    from .conf import config
+
     if not isinstance(value, str):
         value: str = str(value)
 
@@ -177,7 +203,7 @@ def batch(iterable: Iterator[Any], n: int) -> Iterator[Any]:
     """Batch data into iterators of length n. The last batch may be shorter.
 
     Example:
-        >>> for b in batch('ABCDEFG', 3):
+        >>> for b in batch(iter('ABCDEFG'), 3):
         ...     print(list(b))
         ['A', 'B', 'C']
        ['D', 'E', 'F']