ddeutil-workflow 0.0.82__py3-none-any.whl → 0.0.84__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -51,6 +51,8 @@ from .utils import cut_id, get_dt_now, prepare_newline
 
 logger = logging.getLogger("ddeutil.workflow")
 Level = Literal["debug", "info", "warning", "error", "exception"]
+EMJ_ALERT: str = "🚨"
+EMJ_SKIP: str = "⏭️"
 
 
 @lru_cache
@@ -239,7 +241,7 @@ class Metadata(BaseModel): # pragma: no cov
         default=None, description="Environment (dev, staging, prod)."
     )
 
-    # System context
+    # NOTE: System context
     hostname: Optional[str] = Field(
         default=None, description="Hostname where workflow is running."
     )
@@ -253,7 +255,7 @@ class Metadata(BaseModel): # pragma: no cov
         default=None, description="Workflow package version."
     )
 
-    # Custom metadata
+    # NOTE: Custom metadata
     tags: Optional[list[str]] = Field(
         default_factory=list, description="Custom tags for categorization."
     )
@@ -320,6 +322,8 @@ class Metadata(BaseModel): # pragma: no cov
         import socket
         import sys
 
+        from .__about__ import __version__
+
         frame: Optional[FrameType] = currentframe()
         if frame is None:
             raise ValueError("Cannot get current frame")
@@ -384,7 +388,7 @@ class Metadata(BaseModel): # pragma: no cov
             hostname=hostname,
             ip_address=ip_address,
             python_version=python_version,
-            package_version=extras_data.get("package_version"),
+            package_version=__version__,
             # NOTE: Custom metadata
             tags=extras_data.get("tags", []),
             metadata=extras_data.get("metadata", {}),
@@ -2046,7 +2050,7 @@ def get_trace(
     Args:
         run_id (str): A running ID.
         parent_run_id (str | None, default None): A parent running ID.
-        handlers:
+        handlers (list):
         extras: An extra parameter that want to override the core
             config values.
         auto_pre_process (bool, default False)
@@ -2057,7 +2061,7 @@ def get_trace(
     handlers: list[DictData] = dynamic(
         "trace_handlers", f=handlers, extras=extras
    )
-    trace = Trace.model_validate(
+    trace: Trace = Trace.model_validate(
         {
             "run_id": run_id,
             "parent_run_id": parent_run_id,
ddeutil/workflow/utils.py CHANGED
@@ -8,26 +8,6 @@
 This module provides essential utility functions used throughout the workflow
 system for ID generation, datetime handling, string processing, template
 operations, and other common tasks.
-
-Functions:
-    to_train: Convert camel case strings to train case format
-    prepare_newline: Format messages with multiple newlines
-    replace_sec: Replace seconds and microseconds in datetime objects
-    clear_tz: Clear timezone info from datetime objects
-    get_dt_now: Get current datetime with timezone
-    get_d_now: Get current date
-    get_diff_sec: Calculate time difference in seconds
-    reach_next_minute: Check if datetime reaches next minute
-    wait_until_next_minute: Wait until next minute
-    delay: Add random delay to execution
-    gen_id: Generate unique identifiers for workflow components
-    default_gen_id: Generate default running ID
-    make_exec: Make files executable
-    filter_func: Filter function objects from data structures
-    cross_product: Generate cross product of matrix values
-    cut_id: Cut running ID to specified length
-    dump_all: Serialize nested BaseModel objects to dictionaries
-    obj_name: Get object name or class name
 """
 from __future__ import annotations
 
@@ -253,6 +233,33 @@ def gen_id(
     ).hexdigest()
 
 
+def extract_id(
+    name: str,
+    run_id: Optional[str] = None,
+    extras: Optional[DictData] = None,
+) -> tuple[str, str]:
+    """Extract the parent ID and running ID. If the `run_id` parameter was
+    passed, it will replace the parent_run_id with this value and re-generate
+    new running ID for it instead.
+
+    Args:
+        name (str): A name for generate hashing value for the `gen_id` function.
+        run_id (str | None, default None):
+        extras:
+
+    Returns:
+        tuple[str, str]: A pair of parent running ID and running ID.
+    """
+    generated = gen_id(name, unique=True, extras=extras)
+    if run_id:
+        parent_run_id: str = run_id
+        run_id: str = generated
+    else:
+        run_id: str = generated
+        parent_run_id: str = run_id
+    return parent_run_id, run_id
+
+
 def default_gen_id() -> str:
     """Return running ID for making default ID for the Result model.
 
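The new extract_id helper centralizes the parent/running ID pairing that release() and execute() previously did inline (see the workflow.py hunks below). A small sketch of the two call shapes, with an illustrative workflow name and on the assumption that gen_id returns a fresh unique value per call:

    from ddeutil.workflow.utils import extract_id

    # No run_id passed: both values are the same newly generated running ID.
    parent_run_id, run_id = extract_id("wf-demo")
    assert parent_run_id == run_id

    # run_id passed: it becomes the parent, and a new running ID is generated.
    parent_run_id, run_id = extract_id("wf-demo", run_id="20240101081330000000T1354680202")
    assert parent_run_id == "20240101081330000000T1354680202"
    assert run_id != parent_run_id
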
@@ -327,6 +334,8 @@ def cut_id(run_id: str, *, num: int = 8) -> str:
     Example:
         >>> cut_id(run_id='20240101081330000000T1354680202')
         '202401010813680202'
+        >>> cut_id(run_id='20240101081330000000T1354680202')
+        '54680202'
 
     Args:
         run_id: A running ID to cut.
@@ -394,3 +403,19 @@ def obj_name(obj: Optional[Union[str, object]] = None) -> Optional[str]:
     else:
         obj_type: str = obj.__class__.__name__
     return obj_type
+
+
+def pop_sys_extras(extras: DictData, scope: str = "exec") -> DictData:
+    """Remove key that starts with `__sys_` from the extra dict parameter.
+
+    Args:
+        extras:
+        scope (str):
+
+    Returns:
+        DictData:
+    """
+    keys: list[str] = [k for k in extras if not k.startswith(f"__sys_{scope}")]
+    for k in keys:
+        extras.pop(k)
+    return extras
ddeutil/workflow/workflow.py CHANGED
@@ -37,9 +37,11 @@ from threading import Event as ThreadEvent
 from typing import Any, Literal, Optional, Union
 
 from pydantic import BaseModel, Field
+from pydantic.functional_serializers import field_serializer
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self
 
+from . import DRYRUN
 from .__types import DictData
 from .audits import NORMAL, RERUN, Audit, ReleaseType, get_audit
 from .conf import YamlParser, dynamic
@@ -62,10 +64,10 @@ from .result import (
 from .reusables import has_template, param2template
 from .traces import Trace, get_trace
 from .utils import (
-    UTC,
+    extract_id,
     gen_id,
     get_dt_now,
-    replace_sec,
+    pop_sys_extras,
 )
 
 
@@ -245,6 +247,11 @@ class Workflow(BaseModel):
 
         return self
 
+    @field_serializer("extras")
+    def __serialize_extras(self, extras: DictData) -> DictData:
+        """Serialize extra parameter."""
+        return {k: extras[k] for k in extras if not k.startswith("__sys_")}
+
     def detail(self) -> DictData: # pragma: no cov
         """Return the detail of this workflow for generate markdown."""
         return self.model_dump(by_alias=True)
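
The new __serialize_extras hook keeps internal __sys_ bookkeeping keys (set during release and execute in the hunks below) out of any serialized dump of the model. A standalone sketch of the same pydantic pattern on a throwaway model rather than the package's own Workflow class (the extras keys here are illustrative):

    from pydantic import BaseModel
    from pydantic.functional_serializers import field_serializer

    class Demo(BaseModel):
        extras: dict = {}

        @field_serializer("extras")
        def _drop_sys_keys(self, extras: dict) -> dict:
            # Same filtering rule as Workflow.__serialize_extras above.
            return {k: v for k, v in extras.items() if not k.startswith("__sys_")}

    print(Demo(extras={"registry": "demo", "__sys_exec_break_circle": "wf"}).model_dump())
    # {'extras': {'registry': 'demo'}}
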
@@ -256,8 +263,11 @@ class Workflow(BaseModel):
             author (str | None, default None): An author name.
         """
 
-        def align_newline(value: str) -> str:
-            return value.rstrip("\n").replace("\n", "\n ")
+        def align_newline(value: Optional[str]) -> str:
+            space: str = " " * 16
+            if value is None:
+                return ""
+            return value.rstrip("\n").replace("\n", f"\n{space}")
 
         info: str = (
             f"| Author: {author or 'nobody'} "
@@ -284,8 +294,7 @@ class Workflow(BaseModel):
 {align_newline(self.desc)}\n
 ## Parameters\n
 | name | type | default | description |
-| --- | --- | --- | : --- : |
-
+| --- | --- | --- | : --- : |\n\n
 ## Jobs\n
 {align_newline(jobs)}
 """.lstrip(
@@ -314,8 +323,7 @@ class Workflow(BaseModel):
                 f"{self.name!r}"
             )
         job: Job = self.jobs[name]
-        if self.extras:
-            job.extras = self.extras
+        job.extras = self.extras
         return job
 
     def parameterize(self, params: DictData) -> DictData:
@@ -334,8 +342,8 @@ class Workflow(BaseModel):
             execute method.
 
         Returns:
-            DictData: The parameter value that validate with its parameter fields and
-                adding jobs key to this parameter.
+            DictData: The parameter value that validate with its parameter fields
+                and adding jobs key to this parameter.
 
         Raises:
             WorkflowError: If parameter value that want to validate does
@@ -366,33 +374,6 @@ class Workflow(BaseModel):
             "jobs": {},
         }
 
-    def validate_release(self, dt: datetime) -> datetime:
-        """Validate the release datetime that should was replaced second and
-        millisecond to 0 and replaced timezone to None before checking it match
-        with the set `on` field.
-
-        Args:
-            dt (datetime): A datetime object that want to validate.
-
-        Returns:
-            datetime: The validated release datetime.
-        """
-        if dt.tzinfo is None:
-            dt = dt.replace(tzinfo=UTC)
-
-        release: datetime = replace_sec(dt.astimezone(UTC))
-
-        # NOTE: Return itself if schedule event does not set.
-        if not self.on.schedule:
-            return release
-
-        for on in self.on.schedule:
-            if release == on.cronjob.schedule(release, tz=UTC).next:
-                return release
-        raise WorkflowError(
-            "Release datetime does not support for this workflow"
-        )
-
     def release(
         self,
         release: datetime,
@@ -422,11 +403,12 @@ class Workflow(BaseModel):
 
         Args:
             release (datetime): A release datetime.
-            params: A workflow parameter that pass to execute method.
-            release_type:
+            params (DictData): A workflow parameter that pass to execute method.
+            release_type (ReleaseType): A release type that want to execute.
             run_id: (str) A workflow running ID.
             runs_metadata: (DictData)
-            audit: An audit class that want to save the execution result.
+            audit (Audit): An audit model that use to manage release log of this
+                execution.
             override_log_name: (str) An override logging name that use
                 instead the workflow name.
             timeout: (int) A workflow execution time out in second unit.
@@ -441,13 +423,9 @@ class Workflow(BaseModel):
         audit: Audit = audit or get_audit(extras=self.extras)
 
         # NOTE: Generate the parent running ID with not None value.
-        if run_id:
-            parent_run_id: str = run_id
-            run_id: str = gen_id(name, unique=True)
-        else:
-            run_id: str = gen_id(name, unique=True)
-            parent_run_id: str = run_id
-
+        parent_run_id, run_id = extract_id(
+            name, run_id=run_id, extras=self.extras
+        )
         context: DictData = {"status": WAIT}
         audit_data: DictData = {
             "name": name,
@@ -460,7 +438,7 @@ class Workflow(BaseModel):
         trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
-        release: datetime = self.validate_release(dt=release)
+        release: datetime = self.on.validate_dt(dt=release)
         trace.info(f"[RELEASE]: Start {name!r} : {release:%Y-%m-%d %H:%M:%S}")
         values: DictData = param2template(
             params,
@@ -475,7 +453,17 @@ class Workflow(BaseModel):
             extras=self.extras,
         )
 
-        if release_type == NORMAL and audit.is_pointed(data=audit_data):
+        if release_type == RERUN:
+            # TODO: It will load previous audit and use this data to run with
+            # the `rerun` method.
+            raise NotImplementedError(
+                "Release does not support for rerun type yet. Please use the "
+                "`rerun` method instead."
+            )
+        elif release_type == DRYRUN:
+            self.extras.update({"__sys_release_dryrun_mode": True})
+            trace.debug("[RELEASE]: Mark dryrun mode to the extra params.")
+        elif release_type == NORMAL and audit.is_pointed(data=audit_data):
            trace.info("[RELEASE]: Skip this release because it already audit.")
            return Result(
                run_id=run_id,
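
Together with the audit hunk below, this means a DRYRUN release still executes the workflow but marks __sys_release_dryrun_mode in extras and skips writing the audit record, while RERUN keeps raising NotImplementedError. A hedged sketch of triggering a dry run (the workflow name and parameter key are illustrative, and loading through Workflow.from_conf assumes such a workflow exists in the project's config path):

    from datetime import datetime
    from ddeutil.workflow import DRYRUN, Workflow

    wf = Workflow.from_conf("wf-demo")  # hypothetical workflow config
    rs = wf.release(
        release=datetime(2024, 1, 1, 1, 0),  # must satisfy the workflow's `on` schedule, if set
        params={"asat-dt": datetime(2024, 1, 1, 1, 0)},  # illustrative parameter
        release_type=DRYRUN,
    )
    print(rs.status)  # execution result; no audit record is written for DRYRUN
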
@@ -485,14 +473,6 @@ class Workflow(BaseModel):
                 extras=self.extras,
             )
 
-        if release_type == RERUN:
-            # TODO: It will load previous audit and use this data to run with
-            # the `rerun` method.
-            raise NotImplementedError(
-                "Release does not support for rerun type yet. Please use the "
-                "`rerun` method instead."
-            )
-
         rs: Result = self.execute(
             params=values,
             run_id=parent_run_id,
@@ -501,27 +481,29 @@ class Workflow(BaseModel):
         catch(context, status=rs.status, updated=rs.context)
         trace.info(f"[RELEASE]: End {name!r} : {release:%Y-%m-%d %H:%M:%S}")
         trace.debug(f"[RELEASE]: Writing audit: {name!r}.")
-        (
-            audit.save(
-                data=audit_data
-                | {
-                    "context": context,
-                    "runs_metadata": (
-                        (runs_metadata or {})
-                        | rs.info
-                        | {
-                            "timeout": timeout,
-                            "original_name": self.name,
-                            "audit_excluded": audit_excluded,
-                        }
-                    ),
-                },
-                excluded=audit_excluded,
+        if release_type != DRYRUN:
+            (
+                audit.save(
+                    data=audit_data
+                    | {
+                        "context": context,
+                        "runs_metadata": (
+                            (runs_metadata or {})
+                            | rs.info
+                            | {
+                                "timeout": timeout,
+                                "original_name": self.name,
+                                "audit_excluded": audit_excluded,
+                            }
+                        ),
+                    },
+                    excluded=audit_excluded,
+                )
             )
-        )
-        return Result(
-            run_id=run_id,
-            parent_run_id=parent_run_id,
+
+        # NOTE: Pop system extra parameters.
+        pop_sys_extras(self.extras, scope="release")
+        return Result.from_trace(trace).catch(
             status=rs.status,
             context=catch(
                 context,
@@ -536,7 +518,6 @@
                     **(context["errors"] if "errors" in context else {}),
                 },
             ),
-            extras=self.extras,
         )
 
     def execute_job(
@@ -684,8 +665,9 @@ class Workflow(BaseModel):
         :rtype: Result
         """
         ts: float = time.monotonic()
-        parent_run_id: Optional[str] = run_id
-        run_id: str = gen_id(self.name, unique=True, extras=self.extras)
+        parent_run_id, run_id = extract_id(
+            self.name, run_id=run_id, extras=self.extras
+        )
         trace: Trace = get_trace(
             run_id, parent_run_id=parent_run_id, extras=self.extras
         )
@@ -741,6 +723,8 @@ class Workflow(BaseModel):
             extras=self.extras,
         )
 
+        # NOTE: Force update internal extras for handler circle execution.
+        self.extras.update({"__sys_exec_break_circle": self.name})
         with ThreadPoolExecutor(max_job_parallel, "wf") as executor:
             futures: list[Future] = []
 
@@ -769,6 +753,7 @@ class Workflow(BaseModel):
                     backoff_sleep = 0.01
 
                 if check == FAILED: # pragma: no cov
+                    pop_sys_extras(self.extras)
                     return Result(
                         run_id=run_id,
                         parent_run_id=parent_run_id,
@@ -863,6 +848,7 @@ class Workflow(BaseModel):
                 for i, s in enumerate(sequence_statuses, start=0):
                     statuses[total + 1 + skip_count + i] = s
 
+                pop_sys_extras(self.extras)
                 st: Status = validate_statuses(statuses)
                 return Result(
                     run_id=run_id,
@@ -884,6 +870,7 @@ class Workflow(BaseModel):
 
             time.sleep(0.0025)
 
+        pop_sys_extras(self.extras)
         return Result(
             run_id=run_id,
             parent_run_id=parent_run_id,
@@ -913,6 +900,10 @@ class Workflow(BaseModel):
     ) -> Result: # pragma: no cov
         """Re-Execute workflow with passing the error context data.
 
+        Warnings:
+            This rerun method allow to rerun job execution level only. That mean
+            it does not support rerun only stage.
+
         Args:
             context: A context result that get the failed status.
             run_id: (Optional[str]) A workflow running ID.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.82
+Version: 0.0.84
 Summary: Lightweight workflow orchestration with YAML template
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -1,20 +1,20 @@
-ddeutil/workflow/__about__.py,sha256=GJmjvBMMhA0y8IwyPpPJLqLsuBWa6J90Pleir68QW5I,60
-ddeutil/workflow/__cron.py,sha256=avOagaHl9xXOmizeRWm13cOrty9Tw0vRjFq-xoEgpAY,29167
-ddeutil/workflow/__init__.py,sha256=elWSX2JPbjORH-CJIH_zR_nrdd8Xw95NjLK49R4Kqdg,3434
+ddeutil/workflow/__about__.py,sha256=zX5wSNry4MbxSK_Ujk9sD5ooaxsvpp2kdHcvlV5NcWY,60
+ddeutil/workflow/__cron.py,sha256=-1tqZG7GtUmusdl6NTy_Ck7nM_tGYTXYB7TB7tKeO60,29184
+ddeutil/workflow/__init__.py,sha256=Dvfjs7LpLerGCYGnbqKwznViTw7ire_6LR8obC1I4aM,3456
 ddeutil/workflow/__main__.py,sha256=Nqk5aO-HsZVKV2BmuJYeJEufJluipvCD9R1k2kMoJ3Y,8581
-ddeutil/workflow/__types.py,sha256=tA2vsr6mzTSzbWB1sb62c5GgxODlfVRz6FvgLNJtQao,4788
-ddeutil/workflow/audits.py,sha256=YPnWQYvhILPwPS6RhYKK0OG3lCeYFLuUNBxJvMlXf5w,26109
+ddeutil/workflow/__types.py,sha256=IOKuJCxTUPHh8Z2JoLu_K7a85oq0VOcKBhpabiJ6qEE,5001
+ddeutil/workflow/audits.py,sha256=H8yuMzXs_QAAKNox-HXdojk9CcilHYYQtklJYetoZv8,26955
 ddeutil/workflow/conf.py,sha256=VfPmwaBYEgOj8bu4eim13ayZwJ4Liy7I702aQf7vS8g,17644
-ddeutil/workflow/errors.py,sha256=Rqtuf1MGxA-hKGP5wMAkaeeayst-u4P2dX6Fp_pzbsA,5678
-ddeutil/workflow/event.py,sha256=qm7QHw-Pozm6oIUzAIxpDkPzzVZVtHgJIUlIle0vEfQ,13943
-ddeutil/workflow/job.py,sha256=WHWOVz0ErOUfbN_aqpDeNmvBvpbKhcFlzcvJmlCpJuI,48430
+ddeutil/workflow/errors.py,sha256=J4bEbtI7qtBX7eghod4djLf0y5i1r4mCz_uFU4roLhY,5713
+ddeutil/workflow/event.py,sha256=OumcZBlOZD0_J53GS4V2XJEqQ9HEcIl3UicQrCyL46M,14684
+ddeutil/workflow/job.py,sha256=VVTpxVR2iVEkjvP8r0O0LRtAPnrbsguYbKzHpe2TAVo,48146
 ddeutil/workflow/params.py,sha256=y9f6DEIyae1j4awbj3Kbeq75-U2UPFlKv9K57Hdo_Go,17188
-ddeutil/workflow/result.py,sha256=3Lpyv2Jn6T1Uc-lRbweDucSCoBr0ZByHjffKj14bj6s,9492
+ddeutil/workflow/result.py,sha256=0W3z5wAs3Dyr8r2vRMY5hl1MkvdsyXWJmQD4NmsDDOM,10194
 ddeutil/workflow/reusables.py,sha256=SBLJSxR8ELoWJErBfSMZS3Rr1O_93T-fFBpfn2AvxuA,25007
-ddeutil/workflow/stages.py,sha256=CCR_D6yqVo74PuMxfqhi8GeeAq8sRbyxReHob6yxrjI,123708
-ddeutil/workflow/traces.py,sha256=YN4XuRfQK523cNy8EVgz2iPh6s6WB865K9JezCNdM7E,74637
-ddeutil/workflow/utils.py,sha256=vQwFu-wPK-lDiX2L8AZIahCkKEF6I0MCrZ1LlP8xkoQ,12011
-ddeutil/workflow/workflow.py,sha256=VN8i0mVyuCUw1kk_CqLkN-8dWLON9i4vTLbHp6lyS-s,42961
+ddeutil/workflow/stages.py,sha256=zoTtD2w6ZqaAsi-ilH14Wvg_Wx_AJlZqKZCCOzSprj0,139303
+ddeutil/workflow/traces.py,sha256=pq1lOg2UMgDiSDmjHxXPoTaBHnfc7uzzlo1u2TCwN2Q,74733
+ddeutil/workflow/utils.py,sha256=Hqdh6I3OD4N0Kuehn9qWVOg54pO3BG3rgIacCrN7jqk,12298
+ddeutil/workflow/workflow.py,sha256=1db85LamgwFlYtOJv6ghd3WLREfsP4zQpNOlPXbeuy4,43124
 ddeutil/workflow/api/__init__.py,sha256=5DzYL3ngceoRshh5HYCSVWChqNJSiP01E1bEd8XxPi0,4799
 ddeutil/workflow/api/log_conf.py,sha256=WfS3udDLSyrP-C80lWOvxxmhd_XWKvQPkwDqKblcH3E,1834
 ddeutil/workflow/api/routes/__init__.py,sha256=JRaJZB0D6mgR17MbZo8yLtdYDtD62AA8MdKlFqhG84M,420
@@ -27,9 +27,9 @@ ddeutil/workflow/plugins/providers/aws.py,sha256=61uIFBEWt-_D5Sui24qUPier1Hiqlw_
 ddeutil/workflow/plugins/providers/az.py,sha256=o3dh011lEtmr7-d7FPZJPgXdT0ytFzKfc5xnVxSyXGU,34867
 ddeutil/workflow/plugins/providers/container.py,sha256=DSN0RWxMjTJN5ANheeMauDaPa3X6Z2E1eGUcctYkENw,22134
 ddeutil/workflow/plugins/providers/gcs.py,sha256=KgAOdMBvdbMLTH_z_FwVriBFtZfKEYx8_34jzUOVjTY,27460
-ddeutil_workflow-0.0.82.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.82.dist-info/METADATA,sha256=lB4PBeyneqHLwG0S4Lt_Xz8eCz4Lxn2bkDDHIFcBpxE,16087
-ddeutil_workflow-0.0.82.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ddeutil_workflow-0.0.82.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
-ddeutil_workflow-0.0.82.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.82.dist-info/RECORD,,
+ddeutil_workflow-0.0.84.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.84.dist-info/METADATA,sha256=3GFTGHqYEocyCDqwG7-RcaVfrJ61Es9iiC_tP3Aiy04,16087
+ddeutil_workflow-0.0.84.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ddeutil_workflow-0.0.84.dist-info/entry_points.txt,sha256=qDTpPSauL0ciO6T4iSVt8bJeYrVEkkoEEw_RlGx6Kgk,63
+ddeutil_workflow-0.0.84.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.84.dist-info/RECORD,,