ddeutil-workflow 0.0.1__tar.gz → 0.0.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/PKG-INFO +35 -20
  2. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/README.md +32 -16
  3. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/pyproject.toml +12 -8
  4. ddeutil_workflow-0.0.3/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil/workflow/__types.py +1 -0
  6. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil/workflow/conn.py +33 -28
  7. ddeutil_workflow-0.0.3/src/ddeutil/workflow/exceptions.py +12 -0
  8. ddeutil_workflow-0.0.3/src/ddeutil/workflow/loader.py +174 -0
  9. ddeutil_workflow-0.0.3/src/ddeutil/workflow/pipeline.py +492 -0
  10. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil/workflow/schedule.py +10 -15
  11. ddeutil_workflow-0.0.3/src/ddeutil/workflow/tasks/__init__.py +6 -0
  12. ddeutil_workflow-0.0.3/src/ddeutil/workflow/tasks/_pandas.py +54 -0
  13. ddeutil_workflow-0.0.3/src/ddeutil/workflow/tasks/_polars.py +92 -0
  14. ddeutil_workflow-0.0.3/src/ddeutil/workflow/utils.py +180 -0
  15. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/__dataset.py +127 -0
  16. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/pd.py +13 -0
  17. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/pg.py +11 -0
  18. ddeutil_workflow-0.0.1/src/ddeutil/workflow/dataset.py → ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/pl.py +4 -138
  19. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil_workflow.egg-info/PKG-INFO +35 -20
  20. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil_workflow.egg-info/SOURCES.txt +12 -10
  21. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil_workflow.egg-info/requires.txt +2 -3
  22. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/tests/test_conn.py +8 -9
  23. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/tests/test_dataset.py +7 -5
  24. ddeutil_workflow-0.0.3/tests/test_loader.py +6 -0
  25. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/tests/test_pipeline.py +1 -1
  26. ddeutil_workflow-0.0.3/tests/test_pipeline_matrix.py +29 -0
  27. ddeutil_workflow-0.0.3/tests/test_pipeline_params.py +12 -0
  28. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/tests/test_pipeline_run.py +11 -9
  29. ddeutil_workflow-0.0.3/tests/test_pipeline_task.py +80 -0
  30. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/tests/test_schedule.py +4 -31
  31. ddeutil_workflow-0.0.1/src/ddeutil/workflow/__about__.py +0 -1
  32. ddeutil_workflow-0.0.1/src/ddeutil/workflow/exceptions.py +0 -82
  33. ddeutil_workflow-0.0.1/src/ddeutil/workflow/hooks/__init__.py +0 -9
  34. ddeutil_workflow-0.0.1/src/ddeutil/workflow/hooks/_postgres.py +0 -2
  35. ddeutil_workflow-0.0.1/src/ddeutil/workflow/loader.py +0 -310
  36. ddeutil_workflow-0.0.1/src/ddeutil/workflow/pipeline.py +0 -338
  37. ddeutil_workflow-0.0.1/src/ddeutil/workflow/tasks/__init__.py +0 -10
  38. ddeutil_workflow-0.0.1/src/ddeutil/workflow/tasks/_polars.py +0 -41
  39. ddeutil_workflow-0.0.1/src/ddeutil/workflow/utils/receive.py +0 -33
  40. ddeutil_workflow-0.0.1/src/ddeutil/workflow/utils/selection.py +0 -2
  41. ddeutil_workflow-0.0.1/tests/test_loader.py +0 -51
  42. ddeutil_workflow-0.0.1/tests/test_loader_simple.py +0 -89
  43. ddeutil_workflow-0.0.1/tests/test_pipeline_task.py +0 -11
  44. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/LICENSE +0 -0
  45. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/setup.cfg +0 -0
  46. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil/workflow/__init__.py +0 -0
  47. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil/workflow/__regex.py +0 -0
  48. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil/workflow/vendors/__dict.py +0 -0
  49. {ddeutil_workflow-0.0.1/src/ddeutil/workflow/utils → ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors}/__init__.py +0 -0
  50. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil/workflow/vendors/__schedule.py +0 -0
  51. /ddeutil_workflow-0.0.1/src/ddeutil/workflow/vendors/aws_warpped.py → /ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/aws.py +0 -0
  52. /ddeutil_workflow-0.0.1/src/ddeutil/workflow/vendors/__init__.py → /ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/az.py +0 -0
  53. /ddeutil_workflow-0.0.1/src/ddeutil/workflow/vendors/minio_warpped.py → /ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/minio.py +0 -0
  54. /ddeutil_workflow-0.0.1/src/ddeutil/workflow/vendors/sftp_wrapped.py → /ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/sftp.py +0 -0
  55. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  56. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  57. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/tests/test_base_data.py +0 -0
  58. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/tests/test_base_local_and_global.py +0 -0
  59. {ddeutil_workflow-0.0.1 → ddeutil_workflow-0.0.3}/tests/test_base_regex.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ddeutil-workflow
3
- Version: 0.0.1
3
+ Version: 0.0.3
4
4
  Summary: Data Developer & Engineer Workflow Utility Objects
5
5
  Author-email: ddeutils <korawich.anu@gmail.com>
6
6
  License: MIT
@@ -23,15 +23,14 @@ Description-Content-Type: text/markdown
23
23
  License-File: LICENSE
24
24
  Requires-Dist: fmtutil
25
25
  Requires-Dist: ddeutil-io
26
- Requires-Dist: ddeutil-model
27
26
  Requires-Dist: python-dotenv
28
27
  Provides-Extra: test
29
28
  Requires-Dist: sqlalchemy==2.0.30; extra == "test"
30
29
  Requires-Dist: paramiko==3.4.0; extra == "test"
31
30
  Requires-Dist: sshtunnel==0.4.0; extra == "test"
32
- Requires-Dist: boto3==1.34.109; extra == "test"
31
+ Requires-Dist: boto3==1.34.117; extra == "test"
33
32
  Requires-Dist: fsspec==2024.5.0; extra == "test"
34
- Requires-Dist: polars==0.20.26; extra == "test"
33
+ Requires-Dist: polars==0.20.31; extra == "test"
35
34
  Requires-Dist: pyarrow==16.1.0; extra == "test"
36
35
 
37
36
  # Data Utility: _Workflow_
@@ -83,6 +82,15 @@ The first step, you should start create the connections and datasets for In and
83
82
  Out of the data that you want to use in the workflow pipeline. Some of these components
84
83
  are similar to **Airflow** components because I like its concepts.
85
84
 
85
+ The main feature of this project is the `Pipeline` object, which can call any
86
+ registered function. The pipeline can handle everything you want it to do: it
87
+ passes parameters in and catches the output for reuse in the next step.
88
+
89
+ > [!IMPORTANT]
90
+ > In the future of this project, I will move the connection and dataset objects to
91
+ > dynamic registries instead of keeping them as main features, because they carry a lot of
92
+ > vendor code and dependencies to maintain. (I do not have time to handle these features.)
93
+
86
94
  ### Connection
87
95
 
88
96
  The connection object lets the worker connect to anything.
@@ -102,7 +110,9 @@ assert conn.ping()
102
110
 
103
111
  ### Dataset
104
112
 
105
- The dataset is define any objects on the connection.
113
+ A dataset defines any object that lives on a connection. This feature was implemented
114
+ under `/vendors` because it needs a lot of tools to interact with the different data systems
115
+ in the data tool stack.
106
116
 
107
117
  ```yaml
108
118
  ds_postgres_customer_tbl:
@@ -114,7 +124,7 @@ ds_postgres_customer_tbl:
114
124
  ```
115
125
 
116
126
  ```python
117
- from ddeutil.workflow.dataset import PostgresTbl
127
+ from ddeutil.workflow.vendors.pg import PostgresTbl
118
128
 
119
129
  dataset = PostgresTbl.from_loader(name='ds_postgres_customer_tbl', externals={})
120
130
  assert dataset.exists()
@@ -124,14 +134,14 @@ assert dataset.exists()
124
134
 
125
135
  ```yaml
126
136
  schd_for_node:
127
- type: schedule.Scdl
137
+ type: schedule.Schedule
128
138
  cron: "*/5 * * * *"
129
139
  ```
130
140
 
131
141
  ```python
132
- from ddeutil.workflow.schedule import Scdl
142
+ from ddeutil.workflow.schedule import Schedule
133
143
 
134
- scdl = Scdl.from_loader(name='schd_for_node', externals={})
144
+ scdl = Schedule.from_loader(name='schd_for_node', externals={})
135
145
  assert '*/5 * * * *' == str(scdl.cronjob)
136
146
 
137
147
  cron_iterate = scdl.generate('2022-01-01 00:00:00')
@@ -155,8 +165,10 @@ The state of doing lists that worker should to do. It be collection of the stage
155
165
  run_py_local:
156
166
  type: ddeutil.workflow.pipe.Pipeline
157
167
  params:
158
- author-run: utils.receive.string
159
- run-date: utils.receive.datetime
168
+ author-run:
169
+ type: str
170
+ run-date:
171
+ type: datetime
160
172
  jobs:
161
173
  first-job:
162
174
  stages:
@@ -203,13 +215,15 @@ pipe.execute(params={'author-run': 'Local Workflow', 'run-date': '2024-01-01'})
203
215
  pipe_el_pg_to_lake:
204
216
  type: ddeutil.workflow.pipe.Pipeline
205
217
  params:
206
- run-date: utils.receive.datetime
207
- author-email: utils.receive.string
218
+ run-date:
219
+ type: datetime
220
+ author-email:
221
+ type: str
208
222
  jobs:
209
223
  extract-load:
210
224
  stages:
211
225
  - name: "Extract Load from Postgres to Lake"
212
- id: extract
226
+ id: extract-load
213
227
  task: tasks/postgres-to-delta@polars
214
228
  with:
215
229
  source:
@@ -222,21 +236,22 @@ pipe_el_pg_to_lake:
222
236
  endpoint: "/${{ params.name }}"
223
237
  ```
224
238
 
225
- ### Hooks (Transform)
239
+ ### Tasks (Transform)
226
240
 
227
241
  ```yaml
228
242
  pipe_hook_mssql_proc:
229
243
  type: ddeutil.workflow.pipe.Pipeline
230
244
  params:
231
- run_date: utils.receive.datetime
232
- sp_name: utils.receive.string
233
- source_name: utils.receive.string
234
- target_name: utils.receive.string
245
+ run_date: datetime
246
+ sp_name: str
247
+ source_name: str
248
+ target_name: str
235
249
  jobs:
236
250
  transform:
237
251
  stages:
238
252
  - name: "Transform Data in MS SQL Server"
239
- hook: hooks/mssql-proc@odbc
253
+ id: transform
254
+ task: tasks/mssql-proc@odbc
240
255
  with:
241
256
  exec: ${{ params.sp_name }}
242
257
  params:
@@ -47,6 +47,15 @@ The first step, you should start create the connections and datasets for In and
47
47
  Out of the data that you want to use in the workflow pipeline. Some of these components
48
48
  are similar to **Airflow** components because I like its concepts.
49
49
 
50
+ The main feature of this project is the `Pipeline` object, which can call any
51
+ registered function. The pipeline can handle everything you want it to do: it
52
+ passes parameters in and catches the output for reuse in the next step.
53
+
54
+ > [!IMPORTANT]
55
+ > In the future of this project, I will move the connection and dataset objects to
56
+ > dynamic registries instead of keeping them as main features, because they carry a lot of
57
+ > vendor code and dependencies to maintain. (I do not have time to handle these features.)
58
+
50
59
  ### Connection
51
60
 
52
61
  The connection object lets the worker connect to anything.
@@ -66,7 +75,9 @@ assert conn.ping()
66
75
 
67
76
  ### Dataset
68
77
 
69
- The dataset is define any objects on the connection.
78
+ A dataset defines any object that lives on a connection. This feature was implemented
79
+ under `/vendors` because it needs a lot of tools to interact with the different data systems
80
+ in the data tool stack.
70
81
 
71
82
  ```yaml
72
83
  ds_postgres_customer_tbl:
@@ -78,7 +89,7 @@ ds_postgres_customer_tbl:
78
89
  ```
79
90
 
80
91
  ```python
81
- from ddeutil.workflow.dataset import PostgresTbl
92
+ from ddeutil.workflow.vendors.pg import PostgresTbl
82
93
 
83
94
  dataset = PostgresTbl.from_loader(name='ds_postgres_customer_tbl', externals={})
84
95
  assert dataset.exists()
@@ -88,14 +99,14 @@ assert dataset.exists()
88
99
 
89
100
  ```yaml
90
101
  schd_for_node:
91
- type: schedule.Scdl
102
+ type: schedule.Schedule
92
103
  cron: "*/5 * * * *"
93
104
  ```
94
105
 
95
106
  ```python
96
- from ddeutil.workflow.schedule import Scdl
107
+ from ddeutil.workflow.schedule import Schedule
97
108
 
98
- scdl = Scdl.from_loader(name='schd_for_node', externals={})
109
+ scdl = Schedule.from_loader(name='schd_for_node', externals={})
99
110
  assert '*/5 * * * *' == str(scdl.cronjob)
100
111
 
101
112
  cron_iterate = scdl.generate('2022-01-01 00:00:00')
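
A minimal sketch of consuming the schedule runner above; the `next` property is an assumption about the runner API rather than something shown in this diff:

```python
from ddeutil.workflow.schedule import Schedule

scdl = Schedule.from_loader(name="schd_for_node", externals={})
runner = scdl.generate("2022-01-01 00:00:00")

# Assuming `next` advances the runner and returns a datetime, each value
# lands five minutes after the previous one, per the "*/5 * * * *" cron.
for _ in range(3):
    print(runner.next)
```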
@@ -119,8 +130,10 @@ The state of doing lists that worker should to do. It be collection of the stage
119
130
  run_py_local:
120
131
  type: ddeutil.workflow.pipe.Pipeline
121
132
  params:
122
- author-run: utils.receive.string
123
- run-date: utils.receive.datetime
133
+ author-run:
134
+ type: str
135
+ run-date:
136
+ type: datetime
124
137
  jobs:
125
138
  first-job:
126
139
  stages:
@@ -167,13 +180,15 @@ pipe.execute(params={'author-run': 'Local Workflow', 'run-date': '2024-01-01'})
167
180
  pipe_el_pg_to_lake:
168
181
  type: ddeutil.workflow.pipe.Pipeline
169
182
  params:
170
- run-date: utils.receive.datetime
171
- author-email: utils.receive.string
183
+ run-date:
184
+ type: datetime
185
+ author-email:
186
+ type: str
172
187
  jobs:
173
188
  extract-load:
174
189
  stages:
175
190
  - name: "Extract Load from Postgres to Lake"
176
- id: extract
191
+ id: extract-load
177
192
  task: tasks/postgres-to-delta@polars
178
193
  with:
179
194
  source:
@@ -186,21 +201,22 @@ pipe_el_pg_to_lake:
186
201
  endpoint: "/${{ params.name }}"
187
202
  ```
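
A rough sketch of how a pipeline like the ones above might be loaded and executed, following the `from_loader` pattern used for connections and schedules; the `ddeutil.workflow.pipeline` import path is an assumption based on the new `pipeline.py` module in this release:

```python
from ddeutil.workflow.pipeline import Pipeline

# Load the pipeline config by its YAML key.
pipe = Pipeline.from_loader(name="run_py_local", externals={})

# Parameters declared under `params:` (str, datetime) are passed at run time
# and become available to stages via ${{ params.<name> }} templating.
pipe.execute(params={"author-run": "Local Workflow", "run-date": "2024-01-01"})
```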
188
203
 
189
- ### Hooks (Transform)
204
+ ### Tasks (Transform)
190
205
 
191
206
  ```yaml
192
207
  pipe_hook_mssql_proc:
193
208
  type: ddeutil.workflow.pipe.Pipeline
194
209
  params:
195
- run_date: utils.receive.datetime
196
- sp_name: utils.receive.string
197
- source_name: utils.receive.string
198
- target_name: utils.receive.string
210
+ run_date: datetime
211
+ sp_name: str
212
+ source_name: str
213
+ target_name: str
199
214
  jobs:
200
215
  transform:
201
216
  stages:
202
217
  - name: "Transform Data in MS SQL Server"
203
- hook: hooks/mssql-proc@odbc
218
+ id: transform
219
+ task: tasks/mssql-proc@odbc
204
220
  with:
205
221
  exec: ${{ params.sp_name }}
206
222
  params:
@@ -28,7 +28,6 @@ requires-python = ">=3.9.13"
28
28
  dependencies = [
29
29
  "fmtutil",
30
30
  "ddeutil-io",
31
- "ddeutil-model",
32
31
  "python-dotenv",
33
32
  ]
34
33
  dynamic = ["version"]
@@ -40,15 +39,15 @@ Homepage = "https://github.com/ddeutils/ddeutil-workflow/"
40
39
  [project.optional-dependencies]
41
40
  test = [
42
41
  "sqlalchemy==2.0.30",
43
- # SFTP warpper
42
+ # SFTP
44
43
  "paramiko==3.4.0",
45
44
  "sshtunnel==0.4.0",
46
- # AWS
47
- "boto3==1.34.109",
45
+ # AWS Client
46
+ "boto3==1.34.117",
48
47
  # Open files
49
48
  "fsspec==2024.5.0",
50
49
  # Polars
51
- "polars==0.20.26",
50
+ "polars==0.20.31",
52
51
  "pyarrow==16.1.0",
53
52
  ]
54
53
 
@@ -80,6 +79,10 @@ addopts = [
80
79
  "--strict-markers",
81
80
  ]
82
81
  filterwarnings = ["error"]
82
+ log_cli = true
83
+ log_cli_level = "INFO"
84
+ log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s)"
85
+ log_cli_date_format = "%Y-%m-%d %H:%M:%S"
83
86
 
84
87
  [tool.black]
85
88
  line-length = 80
@@ -98,7 +101,6 @@ exclude = """
98
101
  | build
99
102
  | dist
100
103
  | venv
101
- | __legacy
102
104
  )/
103
105
  )
104
106
  """
@@ -114,7 +116,6 @@ exclude = [
114
116
  "build",
115
117
  "dist",
116
118
  "venv",
117
- "__legacy",
118
119
  ]
119
120
 
120
121
  [tool.ruff.lint]
@@ -133,4 +134,7 @@ ignore = [
133
134
  ]
134
135
 
135
136
  [tool.ruff.lint.per-file-ignores]
136
- "__init__.py" = ["F401"]
137
+ "__init__.py" = [
138
+ "F401",
139
+ "F403",
140
+ ]
@@ -0,0 +1 @@
1
+ __version__: str = "0.0.3"
@@ -9,3 +9,4 @@ from typing import Any
9
9
 
10
10
  TupleStr = tuple[str, ...]
11
11
  DictData = dict[str, Any]
12
+ DictStr = dict[str, str]
@@ -10,7 +10,7 @@ from collections.abc import Iterator
10
10
  from pathlib import Path
11
11
  from typing import Annotated, Any, Literal, Optional, TypeVar
12
12
 
13
- from ddeutil.model.conn import Conn as ConnModel
13
+ from ddeutil.io.models.conn import Conn as ConnModel
14
14
  from pydantic import BaseModel, ConfigDict, Field
15
15
  from pydantic.functional_validators import field_validator
16
16
  from pydantic.types import SecretStr
@@ -43,27 +43,21 @@ class BaseConn(BaseModel):
43
43
  ]
44
44
 
45
45
  @classmethod
46
- def from_loader(
47
- cls,
48
- name: str,
49
- externals: DictData,
50
- ) -> Self:
51
- """Construct Connection with Loader object with specific config name.
46
+ def from_dict(cls, values: DictData) -> Self:
47
+ """Construct Connection Model from dict data. This construct is
48
+ different from ``.model_validate()`` because it prepares the values
49
+ before using them if the data does not have a 'url' key.
52
50
 
53
- :param name:
54
- :param externals:
51
+ :param values: A dict of data used to construct this model.
55
52
  """
56
- loader: Loader = Loader(name, externals=externals)
57
- # NOTE: Validate the config type match with current connection model
58
- if loader.type != cls:
59
- raise ValueError(f"Type {loader.type} does not match with {cls}")
53
+ # NOTE: filter out the fields of this model.
60
54
  filter_data: DictData = {
61
- k: loader.data.pop(k)
62
- for k in loader.data.copy()
55
+ k: values.pop(k)
56
+ for k in values.copy()
63
57
  if k not in cls.model_fields and k not in EXCLUDED_EXTRAS
64
58
  }
65
- if "url" in loader.data:
66
- url: ConnModel = ConnModel.from_url(loader.data.pop("url"))
59
+ if "url" in values:
60
+ url: ConnModel = ConnModel.from_url(values.pop("url"))
67
61
  return cls(
68
62
  dialect=url.dialect,
69
63
  host=url.host,
@@ -73,27 +67,38 @@ class BaseConn(BaseModel):
73
67
  # NOTE:
74
68
  # I will replace None endpoint with memory value for SQLite
75
69
  # connection string.
76
- endpoint=cls.__prepare_slash_from_url(url.endpoint or "memory"),
70
+ endpoint=(url.endpoint or "memory"),
77
71
  # NOTE: This order will show that externals this the top level.
78
- extras=(url.options | filter_data | externals),
72
+ extras=(url.options | filter_data),
79
73
  )
80
74
  return cls.model_validate(
81
75
  obj={
82
- "extras": (
83
- loader.data.pop("extras", {}) | filter_data | externals
84
- ),
85
- **loader.data,
76
+ "extras": (values.pop("extras", {}) | filter_data),
77
+ **values,
86
78
  }
87
79
  )
88
80
 
89
81
  @classmethod
90
- def __prepare_slash_from_url(cls, value: str) -> str:
91
- if value.startswith("/"):
92
- return value[1:]
93
- return value
82
+ def from_loader(cls, name: str, externals: DictData) -> Self:
83
+ """Construct Connection with Loader object with specific config name.
84
+
85
+ :param name: A config name.
86
+ :param externals: External data to add into the extras.
87
+ """
88
+ loader: Loader = Loader(name, externals=externals)
89
+ # NOTE: Validate the config type match with current connection model
90
+ if loader.type != cls:
91
+ raise ValueError(f"Type {loader.type} does not match with {cls}")
92
+ return cls.from_dict(
93
+ {
94
+ "extras": (loader.data.pop("extras", {}) | externals),
95
+ **loader.data,
96
+ }
97
+ )
94
98
 
95
99
  @field_validator("endpoint")
96
100
  def __prepare_slash(cls, value: str) -> str:
101
+ """Prepare slash character that map double form URL model loading."""
97
102
  if value.startswith("//"):
98
103
  return value[1:]
99
104
  return value
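
A minimal sketch of the two construction paths (`from_dict` and `from_loader`) added above; the concrete `Conn` class usage and the config name are assumptions based on this hunk, not verified against the full module:

```python
from ddeutil.workflow.conn import Conn  # concrete subclass of BaseConn (assumed)

# URL path: ConnModel.from_url() splits the string into dialect/host/endpoint,
# and any leftover keys are collected into `extras`.
conn = Conn.from_dict({"url": "postgresql://user:pass@localhost:5432/sales"})

# Loader path: from_loader() reads the named YAML config (hypothetical name here),
# checks that its `type` matches this class, then delegates to from_dict().
conn = Conn.from_loader(name="conn_postgres", externals={})
```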
@@ -146,7 +151,7 @@ class SFTP(Conn):
146
151
  dialect: Literal["sftp"] = "sftp"
147
152
 
148
153
  def __client(self):
149
- from .vendors.sftp_wrapped import WrapSFTP
154
+ from .vendors.sftp import WrapSFTP
150
155
 
151
156
  return WrapSFTP(
152
157
  host=self.host,
@@ -0,0 +1,12 @@
1
+ # ------------------------------------------------------------------------------
2
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
3
+ # Licensed under the MIT License. See LICENSE in the project root for
4
+ # license information.
5
+ # ------------------------------------------------------------------------------
6
+ """
7
+ Define error objects for this workflow package
8
+ """
9
+ from __future__ import annotations
10
+
11
+
12
+ class TaskException(Exception): ...
@@ -0,0 +1,174 @@
1
+ # ------------------------------------------------------------------------------
2
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
3
+ # Licensed under the MIT License. See LICENSE in the project root for
4
+ # license information.
5
+ # ------------------------------------------------------------------------------
6
+ from __future__ import annotations
7
+
8
+ from functools import cached_property
9
+ from typing import Any, ClassVar, TypeVar
10
+
11
+ from ddeutil.core import (
12
+ getdot,
13
+ hasdot,
14
+ import_string,
15
+ )
16
+ from ddeutil.io import (
17
+ PathData,
18
+ PathSearch,
19
+ YamlEnvFl,
20
+ )
21
+ from pydantic import BaseModel, Field
22
+ from pydantic.functional_validators import model_validator
23
+
24
+ from .__regex import RegexConf
25
+ from .__types import DictData
26
+
27
+ T = TypeVar("T")
28
+ BaseModelType = type[BaseModel]
29
+ AnyModel = TypeVar("AnyModel", bound=BaseModel)
30
+
31
+
32
+ class Engine(BaseModel):
33
+ """Engine Model"""
34
+
35
+ paths: PathData = Field(default_factory=PathData)
36
+ registry: list[str] = Field(default_factory=lambda: ["ddeutil.workflow"])
37
+
38
+ @model_validator(mode="before")
39
+ def __prepare_registry(cls, values: DictData) -> DictData:
40
+ if (_regis := values.get("registry")) and isinstance(_regis, str):
41
+ values["registry"] = [_regis]
42
+ return values
43
+
44
+
45
+ class Params(BaseModel):
46
+ """Params Model"""
47
+
48
+ engine: Engine = Field(default_factory=Engine)
49
+
50
+
51
+ class SimLoad:
52
+ """Simple Load Object that will search config data by name.
53
+
54
+ :param name: A name of the config data that will be read by the YAML loader object.
55
+ :param params: A Params model object.
56
+ :param externals: External parameters.
57
+
58
+ Note:
59
+ The config data should have a ``type`` key so the engine knows what this
60
+ config should do next.
61
+ """
62
+
63
+ def __init__(
64
+ self,
65
+ name: str,
66
+ params: Params,
67
+ externals: DictData,
68
+ ) -> None:
69
+ self.data: DictData = {}
70
+ for file in PathSearch(params.engine.paths.conf).files:
71
+ if any(file.suffix.endswith(s) for s in ("yml", "yaml")) and (
72
+ data := YamlEnvFl(file).read().get(name, {})
73
+ ):
74
+ self.data = data
75
+ if not self.data:
76
+ raise ValueError(f"Config {name!r} does not found on conf path")
77
+ self.__conf_params: Params = params
78
+ self.externals: DictData = externals
79
+
80
+ @property
81
+ def conf_params(self) -> Params:
82
+ return self.__conf_params
83
+
84
+ @cached_property
85
+ def type(self) -> BaseModelType:
86
+ """Return object type which implement in `config_object` key."""
87
+ if not (_typ := self.data.get("type")):
88
+ raise ValueError(
89
+ f"the 'type' value: {_typ} does not exists in config data."
90
+ )
91
+ try:
92
+ # NOTE: Auto adding module prefix if it does not set
93
+ return import_string(f"ddeutil.workflow.{_typ}")
94
+ except ModuleNotFoundError:
95
+ for registry in self.conf_params.engine.registry:
96
+ try:
97
+ return import_string(f"{registry}.{_typ}")
98
+ except ModuleNotFoundError:
99
+ continue
100
+ return import_string(f"{_typ}")
101
+
102
+ def load(self) -> AnyModel:
103
+ return self.type.model_validate(self.data)
104
+
105
+
106
+ class Loader(SimLoad):
107
+ """Main Loader Object that get the config `yaml` file from current path.
108
+
109
+ :param name: A name of the config data that will be read by the YAML loader object.
110
+ :param externals: External parameters.
111
+ """
112
+
113
+ conf_name: ClassVar[str] = "workflows-conf"
114
+
115
+ def __init__(
116
+ self,
117
+ name: str,
118
+ externals: DictData,
119
+ *,
120
+ path: str | None = None,
121
+ ) -> None:
122
+ self.data: DictData = {}
123
+
124
+ # NOTE: import params object from specific config file
125
+ params: Params = self.config(path)
126
+
127
+ super().__init__(name, params, externals)
128
+
129
+ @classmethod
130
+ def config(cls, path: str | None = None) -> Params:
131
+ """Load Config data from ``workflows-conf.yaml`` file."""
132
+ return Params.model_validate(
133
+ YamlEnvFl(path or f"./{cls.conf_name}.yaml").read()
134
+ )
135
+
136
+
137
+ def map_params(value: Any, params: dict[str, Any]) -> Any:
138
+ """Map caller value that found from ``RE_CALLER`` regular expression.
139
+
140
+ :param value: A value to map against the params.
141
+ :param params: A parameter mapping used to resolve the matched regular
142
+ expression.
143
+
144
+ :rtype: Any
145
+ :returns: The getter value resolved from the params input.
146
+ """
147
+ if isinstance(value, dict):
148
+ return {k: map_params(value[k], params) for k in value}
149
+ elif isinstance(value, (list, tuple, set)):
150
+ return type(value)([map_params(i, params) for i in value])
151
+ elif not isinstance(value, str):
152
+ return value
153
+
154
+ if not (found := RegexConf.RE_CALLER.search(value)):
155
+ return value
156
+
157
+ # NOTE: Get the caller value set inside ``${{ <caller-value> }}``
158
+ caller: str = found.group("caller")
159
+ if not hasdot(caller, params):
160
+ raise ValueError(f"params does not set caller: {caller!r}")
161
+ getter: Any = getdot(caller, params)
162
+
163
+ # NOTE: check type of vars
164
+ if isinstance(getter, (str, int)):
165
+ return value.replace(found.group(0), str(getter))
166
+
167
+ # NOTE:
168
+ # If the getter value does not support string formatting, the caller must
169
+ # be the entire value; the getter is then returned as-is.
170
+ if value.replace(found.group(0), "") != "":
171
+ raise ValueError(
172
+ "Callable variable should not pass other outside ${{ ... }}"
173
+ )
174
+ return getter
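
A small usage sketch for `map_params`, assuming `RegexConf.RE_CALLER` matches the `${{ ... }}` caller syntax used in the README examples:

```python
from ddeutil.workflow.loader import map_params

params = {"params": {"name": "customer"}}

# String and int getters are substituted in place inside the template...
assert map_params("/${{ params.name }}", params) == "/customer"

# ...and dict/list/tuple/set values are mapped recursively.
assert map_params({"endpoint": "/${{ params.name }}"}, params) == {"endpoint": "/customer"}
```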