ddeutil-workflow 0.0.71__tar.gz → 0.0.72__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.71/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.72}/PKG-INFO +4 -4
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/pyproject.toml +3 -3
- ddeutil_workflow-0.0.72/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/cli.py +19 -3
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/conf.py +19 -9
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/event.py +1 -1
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/utils.py +13 -1
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/workflow.py +1 -1
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72/src/ddeutil_workflow.egg-info}/PKG-INFO +4 -4
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil_workflow.egg-info/SOURCES.txt +1 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil_workflow.egg-info/requires.txt +3 -3
- ddeutil_workflow-0.0.72/tests/test_cli.py +15 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_conf.py +58 -27
- ddeutil_workflow-0.0.71/src/ddeutil/workflow/__about__.py +0 -1
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/LICENSE +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/README.md +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/__cron.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/__init__.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/__main__.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/api/__init__.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/api/log_conf.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/api/routes/job.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/api/routes/logs.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/api/routes/workflows.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/audits.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/errors.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/job.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/params.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/result.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/reusables.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/stages.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/traces.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_audits.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_errors.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_event.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_job.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_job_exec.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_job_exec_strategy.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_result.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_reusables_call_tag.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_reusables_func_model.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_reusables_template.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_reusables_template_filter.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_strategy.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_traces.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_utils.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_workflow.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_workflow_exec.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_workflow_exec_job.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_workflow_release.py +0 -0
- {ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_workflow_rerun.py +0 -0
{ddeutil_workflow-0.0.71/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.72}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.71
+Version: 0.0.72
 Summary: Lightweight workflow orchestration with YAML template
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -24,9 +24,9 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil[checksum]>=0.4.8
 Requires-Dist: ddeutil-io[toml,yaml]>=0.2.14
-Requires-Dist: pydantic
-Requires-Dist: pydantic-extra-types
-Requires-Dist: python-dotenv
+Requires-Dist: pydantic<3.0.0,==2.11.5
+Requires-Dist: pydantic-extra-types<3.0.0,>=2.10.4
+Requires-Dist: python-dotenv>=1.1.0
 Requires-Dist: typer>=0.16.0
 Provides-Extra: all
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "all"
{ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/pyproject.toml
@@ -27,9 +27,9 @@ requires-python = ">=3.9.13"
 dependencies = [
     "ddeutil[checksum]>=0.4.8",
     "ddeutil-io[yaml,toml]>=0.2.14",
-    "pydantic==2.11.5",
-    "pydantic-extra-types
-    "python-dotenv
+    "pydantic==2.11.5,<3.0.0",
+    "pydantic-extra-types>=2.10.4,<3.0.0",
+    "python-dotenv>=1.1.0",
     "typer>=0.16.0",
 ]
 dynamic = ["version"]
ddeutil_workflow-0.0.72/src/ddeutil/workflow/__about__.py
@@ -0,0 +1 @@
+__version__: str = "0.0.72"
{ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/cli.py
@@ -1,3 +1,10 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
 import json
 from pathlib import Path
 from platform import python_version
@@ -8,7 +15,6 @@ from pydantic import Field, TypeAdapter
 
 from .__about__ import __version__
 from .__types import DictData
-from .api import app as fastapp
 from .errors import JobError
 from .event import Crontab
 from .job import Job
@@ -46,7 +52,7 @@ def execute_job(
     """Job execution on the local.
 
     Example:
-        ... workflow-cli job --params "{
+        ... workflow-cli job --params \"{\\\"test\\\": 1}\"
     """
     try:
         params_dict: dict[str, Any] = json.loads(params)
@@ -92,6 +98,7 @@ def api(
     """
     import uvicorn
 
+    from .api import app as fastapp
     from .api.log_conf import LOGGING_CONFIG
 
     # LOGGING_CONFIG = {}
@@ -121,7 +128,7 @@ def make(
 
 
 workflow_app = typer.Typer()
-app.add_typer(workflow_app, name="
+app.add_typer(workflow_app, name="workflows", help="An Only Workflow CLI.")
 
 
 @workflow_app.callback()
@@ -180,5 +187,14 @@ def workflow_json_schema(
         json.dump(template_schema | json_schema, f, indent=2)
 
 
+log_app = typer.Typer()
+app.add_typer(log_app, name="logs", help="An Only Log CLI.")
+
+
+@log_app.callback()
+def log_callback():
+    """Manage Only Log CLI."""
+
+
 if __name__ == "__main__":
     app()
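The cli.py hunks above regroup the Typer application: the workflow sub-app is mounted under the name `workflows`, the FastAPI app import moves inside the `api` command, and a new callback-only `logs` sub-app is registered. A minimal smoke-test sketch in the style of the new `tests/test_cli.py`, using typer's `CliRunner`; the `--help` invocations and assertions below are illustrative and not part of this release.

```python
from typer.testing import CliRunner

from ddeutil.workflow.cli import app

runner = CliRunner()

# The workflow commands are now grouped under the "workflows" sub-app.
result = runner.invoke(app, ["workflows", "--help"])
assert result.exit_code == 0

# The new "logs" sub-app only registers a callback so far, so rendering its
# help output is the safest thing to exercise at this point.
result = runner.invoke(app, ["logs", "--help"])
assert result.exit_code == 0
```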
{ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/conf.py
@@ -9,7 +9,6 @@ import copy
 import os
 from collections.abc import Iterator
 from functools import cached_property
-from inspect import isclass
 from pathlib import Path
 from typing import Final, Optional, TypeVar, Union
 from zoneinfo import ZoneInfo
@@ -20,6 +19,7 @@ from ddeutil.io.paths import glob_files, is_ignored, read_ignore
 from pydantic import SecretStr
 
 from .__types import DictData
+from .utils import obj_name
 
 T = TypeVar("T")
 PREFIX: Final[str] = "WORKFLOW"
@@ -192,6 +192,7 @@ class YamlParser:
         path: Optional[Union[str, Path]] = None,
         externals: DictData | None = None,
         extras: DictData | None = None,
+        obj: Optional[Union[object, str]] = None,
     ) -> None:
         self.path: Path = Path(dynamic("conf_path", f=path, extras=extras))
         self.externals: DictData = externals or {}
@@ -201,6 +202,7 @@ class YamlParser:
             path=path,
             paths=self.extras.get("conf_paths"),
             extras=extras,
+            obj=obj,
         )
 
         # VALIDATE: check the data that reading should not empty.
@@ -218,6 +220,7 @@ class YamlParser:
         *,
         path: Optional[Path] = None,
         paths: Optional[list[Path]] = None,
+        obj: Optional[Union[object, str]] = None,
         extras: Optional[DictData] = None,
     ) -> DictData:
         """Find data with specific key and return the latest modify date data if
@@ -226,6 +229,7 @@ class YamlParser:
         :param name: (str) A name of data that want to find.
         :param path: (Path) A config path object.
         :param paths: (list[Path]) A list of config path object.
+        :param obj:
         :param extras: (DictData) An extra parameter that use to override core
             config values.
 
@@ -243,6 +247,8 @@ class YamlParser:
             paths.append(path)
 
         all_data: list[tuple[float, DictData]] = []
+        obj_type: Optional[str] = obj_name(obj)
+
         for path in paths:
             for file in glob_files(path):
 
@@ -250,14 +256,19 @@ class YamlParser:
                    continue
 
                if data := cls.filter_yaml(file, name=name):
-                    all_data.append((file.lstat().st_mtime, data))
+                    if not obj_type:
+                        all_data.append((file.lstat().st_mtime, data))
+                    elif data.get("type", "") == obj_type:
+                        all_data.append((file.lstat().st_mtime, data))
+                    else:
+                        continue
 
        return {} if not all_data else max(all_data, key=lambda x: x[0])[1]
 
    @classmethod
    def finds(
        cls,
-        obj: object,
+        obj: Union[object, str],
        *,
        path: Optional[Path] = None,
        paths: Optional[list[Path]] = None,
@@ -268,8 +279,8 @@ class YamlParser:
        method can use include and exclude list of identity name for filter and
        adds-on.
 
-        :param obj: (object) An object that want to validate matching
-            return.
+        :param obj: (object | str) An object that want to validate matching
+            before return.
        :param path: (Path) A config path object.
        :param paths: (list[Path]) A list of config path object.
        :param excluded: An included list of data key that want to filter from
@@ -292,6 +303,8 @@ class YamlParser:
            paths.append(path)
 
        all_data: dict[str, list[tuple[float, DictData]]] = {}
+        obj_type: str = obj_name(obj)
+
        for path in paths:
            for file in glob_files(path):
 
@@ -303,10 +316,7 @@ class YamlParser:
                if key in excluded:
                    continue
 
-                if (
-                    data.get("type", "")
-                    == (obj if isclass(obj) else obj.__class__).__name__
-                ):
+                if data.get("type", "") == obj_type:
                    marking: tuple[float, DictData] = (
                        file.lstat().st_mtime,
                        data,
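Taken together, the conf.py hunks thread an optional `obj` filter through `YamlParser`: when it is given, only YAML entries whose `type` field matches the resolved name are considered; when it is omitted, the previous latest-modified-wins behaviour is kept. A short usage sketch based on the calls visible in this diff and in `tests/test_conf.py`; the config directory and entry names are made up for illustration.

```python
from pathlib import Path

from ddeutil.workflow.conf import YamlParser

conf_path = Path("./conf")  # hypothetical directory of YAML config files

# Constructor-level filtering, as exercised in tests/test_conf.py.
parser = YamlParser("my-flow", path=conf_path, obj="Workflow")
print(parser.data)

# The classmethod lookups take the same filter; `obj` may be a string,
# a class, or an instance (it is normalised by `obj_name` in utils.py).
data = YamlParser.find("my-flow", path=conf_path, obj="Workflow")

# `finds` yields (name, data) pairs for every entry whose type matches.
for name, body in YamlParser.finds("Workflow", path=conf_path):
    print(name, body.get("desc"))
```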
{ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/event.py
@@ -139,7 +139,7 @@ class Crontab(BaseModel):
         :rtype: Self
         """
         extras: DictData = extras or {}
-        loader: YamlParser = YamlParser(name, extras=extras)
+        loader: YamlParser = YamlParser(name, extras=extras, obj=cls)
 
         # NOTE: Validate the config type match with current connection model
         if loader.type != cls.__name__:
{ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/utils.py
@@ -11,7 +11,7 @@ import time
 from collections.abc import Iterator
 from datetime import date, datetime, timedelta
 from hashlib import md5
-from inspect import isfunction
+from inspect import isclass, isfunction
 from itertools import product
 from pathlib import Path
 from random import randrange
@@ -303,3 +303,15 @@ def dump_all(
     elif isinstance(value, BaseModel):
         return value.model_dump(by_alias=by_alias)
     return value
+
+
+def obj_name(obj: Optional[Union[str, object]] = None) -> Optional[str]:
+    if not obj:
+        obj_type: Optional[str] = None
+    elif isinstance(obj, str):
+        obj_type: str = obj
+    elif isclass(obj):
+        obj_type: str = obj.__name__
+    else:
+        obj_type: str = obj.__class__.__name__
+    return obj_type
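The new `obj_name` helper is what `YamlParser.find` and `YamlParser.finds` use to normalise the `obj` filter into a plain type name. Its rules, restated as a few illustrative assertions; the `Workflow` class below is a stand-in defined only for this example, not the real model.

```python
from ddeutil.workflow.utils import obj_name


class Workflow:  # stand-in class used only for this illustration
    ...


assert obj_name(None) is None               # no filter requested
assert obj_name("Workflow") == "Workflow"   # strings pass through unchanged
assert obj_name(Workflow) == "Workflow"     # a class maps to its __name__
assert obj_name(Workflow()) == "Workflow"   # an instance maps to its class name
```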
{ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/src/ddeutil/workflow/workflow.py
@@ -126,7 +126,7 @@ class Workflow(BaseModel):
 
         :rtype: Self
         """
-        load: YamlParser = YamlParser(name, path=path, extras=extras)
+        load: YamlParser = YamlParser(name, path=path, extras=extras, obj=cls)
 
         # NOTE: Validate the config type match with current connection model
         if load.type != cls.__name__:
{ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72/src/ddeutil_workflow.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.71
+Version: 0.0.72
 Summary: Lightweight workflow orchestration with YAML template
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -24,9 +24,9 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil[checksum]>=0.4.8
 Requires-Dist: ddeutil-io[toml,yaml]>=0.2.14
-Requires-Dist: pydantic
-Requires-Dist: pydantic-extra-types
-Requires-Dist: python-dotenv
+Requires-Dist: pydantic<3.0.0,==2.11.5
+Requires-Dist: pydantic-extra-types<3.0.0,>=2.10.4
+Requires-Dist: python-dotenv>=1.1.0
 Requires-Dist: typer>=0.16.0
 Provides-Extra: all
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "all"
ddeutil_workflow-0.0.72/tests/test_cli.py
@@ -0,0 +1,15 @@
+import pytest
+from ddeutil.workflow.cli import app
+from typer.testing import CliRunner
+
+
+@pytest.fixture(scope="module")
+def runner() -> CliRunner:
+    return CliRunner()
+
+
+def test_app(runner: CliRunner):
+    result = runner.invoke(app, ["version"])
+    assert result.exit_code == 0
+    assert "ddeutil-workflow==" in result.output
+    assert "python-version==" in result.output
{ddeutil_workflow-0.0.71 → ddeutil_workflow-0.0.72}/tests/test_conf.py
@@ -76,13 +76,22 @@ def test_load_file(target_path: Path):
         "env": "Asia/Bangkok",
     }
 
+    load = YamlParser(
+        "test_load_file", extras={"conf_paths": [target_path]}, obj="Workflow"
+    )
+    assert load.data == {
+        "type": "Workflow",
+        "desc": "Test multi config path",
+        "env": "${WORKFLOW_CORE_TIMEZONE}",
+    }
+
     # NOTE: Raise because passing `conf_paths` invalid type.
     with pytest.raises(TypeError):
         YamlParser("test_load_file", extras={"conf_paths": target_path})
 
 
 def test_load_file_finds(target_path: Path):
-    dummy_file: Path = target_path / "
+    dummy_file: Path = target_path / "01_test_simple_file.yaml"
     with dummy_file.open(mode="w") as f:
         yaml.dump(
             {
@@ -90,7 +99,7 @@ def test_load_file_finds(target_path: Path):
                 "type": "Config",
                 "foo": "bar",
             },
-            "test_load_file": {"type": "Workflow"},
+            "test_load_file": {"type": "Workflow", "data": "foo"},
         },
         f,
     )
@@ -102,6 +111,7 @@ def test_load_file_finds(target_path: Path):
             {"type": "Config", "foo": "bar"},
         )
     ] == list(YamlParser.finds(Config, path=config.conf_path))
+
     assert [] == list(
         YamlParser.finds(
             Config,
@@ -110,6 +120,48 @@ def test_load_file_finds(target_path: Path):
         )
     )
 
+    # NOTE: Create duplicate data with the first order by filename.
+    dummy_file_dup: Path = target_path / "00_test_simple_file_duplicate.yaml"
+    with dummy_file_dup.open(mode="w") as f:
+        yaml.dump(
+            {"test_load_file": {"type": "Workflow", "data": "bar"}},
+            f,
+        )
+
+    assert [
+        (
+            "test_load_file",
+            {"type": "Workflow", "data": "bar"},
+        ),
+    ] == list(YamlParser.finds("Workflow", path=target_path))
+
+    dummy_file_dup.unlink()
+
+    # NOTE: Create duplicate data with the first order by filename.
+    dummy_file_dup: Path = target_path / "00_test_simple_file_duplicate.yaml"
+    with dummy_file_dup.open(mode="w") as f:
+        yaml.dump(
+            {"test_load_file": {"type": "Config", "data": "bar"}},
+            f,
+        )
+
+    assert [
+        (
+            "test_load_file",
+            {"type": "Workflow", "data": "foo"},
+        ),
+    ] == list(YamlParser.finds("Workflow", path=target_path))
+
+    load = YamlParser.find("test_load_file", path=target_path, obj="Workflow")
+    assert load == {"type": "Workflow", "data": "foo"}
+
+    # NOTE: Load with the same name, but it set different type.
+    load = YamlParser.find("test_load_file", path=target_path, obj="Config")
+    assert load == {"type": "Config", "data": "bar"}
+
+    load = YamlParser.find("test_load_file", path=target_path, obj="Crontab")
+    assert load == {}
+
     dummy_file.unlink()
 
 
@@ -117,12 +169,7 @@ def test_load_file_finds_raise(target_path: Path):
     dummy_file: Path = target_path / "test_simple_file_raise.yaml"
     with dummy_file.open(mode="w") as f:
         yaml.dump(
-            {
-                "test_load_file_config": {
-                    "foo": "bar",
-                },
-                "test_load_file": {"type": "Workflow"},
-            },
+            {"test_load_file": {"type": "Workflow"}},
             f,
         )
 
@@ -130,27 +177,11 @@ def test_load_file_finds_raise(target_path: Path):
     with pytest.raises(ValueError):
         _ = YamlParser("test_load_file_config", path=config.conf_path).type
 
-
-
-
-    target_p = test_path / "test_schedule_conf"
-    target_p.mkdir(exist_ok=True)
-
-    with (target_p / "test_schedule_conf.yaml").open(mode="w") as f:
-        yaml.dump(
-            {
-                "schedule-wf": {
-                    "type": "Schedule",
-                    "desc": "Test multi config path",
-                }
-            },
-            f,
+    assert (
+        YamlParser("test_load_file", path=config.conf_path).type
+        == "Workflow"
     )
 
-    yield target_p
-
-    shutil.rmtree(target_p)
-
 
 def test_dynamic():
     conf = dynamic("audit_path", extras={"audit_path": Path("/extras-audits")})
ddeutil_workflow-0.0.71/src/ddeutil/workflow/__about__.py
@@ -1 +0,0 @@
-__version__: str = "0.0.71"