ddeutil-workflow 0.0.3__tar.gz → 0.0.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. {ddeutil_workflow-0.0.3/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.5}/PKG-INFO +57 -58
  2. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/README.md +53 -47
  3. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/pyproject.toml +5 -20
  4. ddeutil_workflow-0.0.5/src/ddeutil/workflow/__about__.py +1 -0
  5. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/__schedule.py → ddeutil_workflow-0.0.5/src/ddeutil/workflow/__scheduler.py +153 -135
  6. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/src/ddeutil/workflow/loader.py +9 -1
  7. ddeutil_workflow-0.0.5/src/ddeutil/workflow/on.py +143 -0
  8. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/src/ddeutil/workflow/pipeline.py +102 -46
  9. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/src/ddeutil/workflow/tasks/__init__.py +1 -1
  10. ddeutil_workflow-0.0.5/src/ddeutil/workflow/tasks/dummy.py +52 -0
  11. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/src/ddeutil/workflow/utils.py +33 -5
  12. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5/src/ddeutil_workflow.egg-info}/PKG-INFO +57 -58
  13. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/src/ddeutil_workflow.egg-info/SOURCES.txt +6 -19
  14. ddeutil_workflow-0.0.5/src/ddeutil_workflow.egg-info/requires.txt +4 -0
  15. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/tests/test_base_data.py +0 -1
  16. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/tests/test_base_local_and_global.py +4 -4
  17. ddeutil_workflow-0.0.5/tests/test_base_schedule.py +75 -0
  18. ddeutil_workflow-0.0.3/tests/test_schedule.py → ddeutil_workflow-0.0.5/tests/test_on.py +12 -3
  19. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/tests/test_pipeline_run.py +17 -24
  20. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/tests/test_pipeline_task.py +3 -3
  21. ddeutil_workflow-0.0.3/src/ddeutil/workflow/__about__.py +0 -1
  22. ddeutil_workflow-0.0.3/src/ddeutil/workflow/conn.py +0 -240
  23. ddeutil_workflow-0.0.3/src/ddeutil/workflow/schedule.py +0 -82
  24. ddeutil_workflow-0.0.3/src/ddeutil/workflow/tasks/_pandas.py +0 -54
  25. ddeutil_workflow-0.0.3/src/ddeutil/workflow/tasks/_polars.py +0 -92
  26. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/__dataset.py +0 -127
  27. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/__dict.py +0 -333
  28. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/__init__.py +0 -0
  29. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/aws.py +0 -185
  30. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/az.py +0 -0
  31. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/minio.py +0 -11
  32. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/pd.py +0 -13
  33. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/pg.py +0 -11
  34. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/pl.py +0 -172
  35. ddeutil_workflow-0.0.3/src/ddeutil/workflow/vendors/sftp.py +0 -209
  36. ddeutil_workflow-0.0.3/src/ddeutil_workflow.egg-info/requires.txt +0 -12
  37. ddeutil_workflow-0.0.3/tests/test_conn.py +0 -93
  38. ddeutil_workflow-0.0.3/tests/test_dataset.py +0 -90
  39. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/LICENSE +0 -0
  40. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/setup.cfg +0 -0
  41. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/src/ddeutil/workflow/__init__.py +0 -0
  42. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/src/ddeutil/workflow/__regex.py +0 -0
  43. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/src/ddeutil/workflow/__types.py +0 -0
  44. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/src/ddeutil/workflow/exceptions.py +0 -0
  45. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  46. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  47. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/tests/test_base_regex.py +0 -0
  48. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/tests/test_loader.py +0 -0
  49. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/tests/test_pipeline.py +0 -0
  50. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/tests/test_pipeline_matrix.py +0 -0
  51. {ddeutil_workflow-0.0.3 → ddeutil_workflow-0.0.5}/tests/test_pipeline_params.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.3
+Version: 0.0.5
 Summary: Data Developer & Engineer Workflow Utility Objects
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -9,7 +9,7 @@ Project-URL: Source Code, https://github.com/ddeutils/ddeutil-workflow/
 Keywords: data,workflow,utility,pipeline
 Classifier: Topic :: Utilities
 Classifier: Natural Language :: English
-Classifier: Development Status :: 3 - Alpha
+Classifier: Development Status :: 4 - Beta
 Classifier: Intended Audience :: Developers
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
@@ -23,21 +23,16 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: fmtutil
 Requires-Dist: ddeutil-io
-Requires-Dist: python-dotenv
-Provides-Extra: test
-Requires-Dist: sqlalchemy==2.0.30; extra == "test"
-Requires-Dist: paramiko==3.4.0; extra == "test"
-Requires-Dist: sshtunnel==0.4.0; extra == "test"
-Requires-Dist: boto3==1.34.117; extra == "test"
-Requires-Dist: fsspec==2024.5.0; extra == "test"
-Requires-Dist: polars==0.20.31; extra == "test"
-Requires-Dist: pyarrow==16.1.0; extra == "test"
+Requires-Dist: python-dotenv==1.0.1
+Requires-Dist: schedule==1.2.2
 
 # Data Utility: _Workflow_
 
 [![test](https://github.com/ddeutils/ddeutil-workflow/actions/workflows/tests.yml/badge.svg?branch=main)](https://github.com/ddeutils/ddeutil-workflow/actions/workflows/tests.yml)
 [![python support version](https://img.shields.io/pypi/pyversions/ddeutil-workflow)](https://pypi.org/project/ddeutil-workflow/)
 [![size](https://img.shields.io/github/languages/code-size/ddeutils/ddeutil-workflow)](https://github.com/ddeutils/ddeutil-workflow)
+[![gh license](https://img.shields.io/github/license/ddeutils/ddeutil-workflow)](https://github.com/ddeutils/ddeutil-workflow/blob/main/LICENSE)
+
 
 **Table of Contents**:
 
@@ -46,10 +41,11 @@ Requires-Dist: pyarrow==16.1.0; extra == "test"
   - [Connection](#connection)
   - [Dataset](#dataset)
   - [Schedule](#schedule)
-- [Examples](#examples)
-  - [Python](#python)
+- [Pipeline Examples](#examples)
+  - [Python & Shell](#python--shell)
   - [Tasks (EL)](#tasks-extract--load)
-  - [Hooks (T)](#hooks-transform)
+  - [Hooks (T)](#tasks-transform)
+- [Configuration](#configuration)
 
 This **Utility Workflow** objects was created for easy to make a simple metadata
 driven pipeline that able to **ETL, T, EL, or ELT** by `.yaml` file.
@@ -80,7 +76,7 @@ This project need `ddeutil-io`, `ddeutil-model` extension namespace packages.
 
 The first step, you should start create the connections and datasets for In and
 Out of you data that want to use in pipeline of workflow. Some of this component
-is similar component of the **Airflow** because I like it concepts.
+is similar component of the **Airflow** because I like it orchestration concepts.
 
 The main feature of this project is the `Pipeline` object that can call any
 registries function. The pipeline can handle everything that you want to do, it
@@ -91,44 +87,7 @@ will passing parameters and catching the output for re-use it to next step.
 > dynamic registries instead of main features because it have a lot of maintain
 > vendor codes and deps. (I do not have time to handle this features)
 
-### Connection
-
-The connection for worker able to do any thing.
-
-```yaml
-conn_postgres_data:
-  type: conn.Postgres
-  url: 'postgres//username:${ENV_PASS}@hostname:port/database?echo=True&time_out=10'
-```
-
-```python
-from ddeutil.workflow.conn import Conn
-
-conn = Conn.from_loader(name='conn_postgres_data', externals={})
-assert conn.ping()
-```
-
-### Dataset
-
-The dataset is define any objects on the connection. This feature was implemented
-on `/vendors` because it has a lot of tools that can interact with any data systems
-in the data tool stacks.
-
-```yaml
-ds_postgres_customer_tbl:
-  type: dataset.PostgresTbl
-  conn: 'conn_postgres_data'
-  features:
-    id: serial primary key
-    name: varchar( 100 ) not null
-```
-
-```python
-from ddeutil.workflow.vendors.pg import PostgresTbl
-
-dataset = PostgresTbl.from_loader(name='ds_postgres_customer_tbl', externals={})
-assert dataset.exists()
-```
+---
 
 ### Schedule
 
@@ -139,7 +98,7 @@ schd_for_node:
 ```
 
 ```python
-from ddeutil.workflow.schedule import Schedule
+from ddeutil.workflow.on import Schedule
 
 scdl = Schedule.from_loader(name='schd_for_node', externals={})
 assert '*/5 * * * *' == str(scdl.cronjob)
@@ -152,18 +111,35 @@ assert '2022-01-01 00:20:00' f"{cron_iterate.next:%Y-%m-%d %H:%M:%S}"
 assert '2022-01-01 00:25:00' f"{cron_iterate.next:%Y-%m-%d %H:%M:%S}"
 ```
 
+---
+
+### Pipeline
+
+```yaml
+run_py_local:
+  type: ddeutil.workflow.pipeline.Pipeline
+  ...
+```
+
+```python
+from ddeutil.workflow.pipeline import Pipeline
+
+pipe = Pipeline.from_loader(name='run_py_local', externals={})
+pipe.execute(params={'author-run': 'Local Workflow', 'run-date': '2024-01-01'})
+```
+
 ## Examples
 
 This is examples that use workflow file for running common Data Engineering
 use-case.
 
-### Python
+### Python & Shell
 
 The state of doing lists that worker should to do. It be collection of the stage.
 
 ```yaml
 run_py_local:
-  type: ddeutil.workflow.pipe.Pipeline
+  type: ddeutil.workflow.pipeline.Pipeline
   params:
     author-run:
       type: str
@@ -194,6 +170,12 @@ run_py_local:
             echo: ${{ stages.define-func.outputs.echo }}
           run: |
             echo('Caller')
+    second-job:
+      stages:
+        - name: Echo Shell Script
+          id: shell-echo
+          shell: |
+            echo "Hello World from Shell"
 ```
 
 ```python
@@ -207,13 +189,16 @@ pipe.execute(params={'author-run': 'Local Workflow', 'run-date': '2024-01-01'})
 > Hello Local Workflow
 > Receive x from above with Local Workflow
 > Hello Caller
+> Hello World from Shell
 ```
 
+---
+
 ### Tasks (Extract & Load)
 
 ```yaml
 pipe_el_pg_to_lake:
-  type: ddeutil.workflow.pipe.Pipeline
+  type: ddeutil.workflow.pipeline.Pipeline
   params:
     run-date:
       type: datetime
@@ -236,11 +221,15 @@ pipe_el_pg_to_lake:
           endpoint: "/${{ params.name }}"
 ```
 
+---
+
 ### Tasks (Transform)
 
+> I recommend you to use task for all actions that you want to do.
+
 ```yaml
 pipe_hook_mssql_proc:
-  type: ddeutil.workflow.pipe.Pipeline
+  type: ddeutil.workflow.pipeline.Pipeline
   params:
     run_date: datetime
     sp_name: str
@@ -261,6 +250,16 @@ pipe_hook_mssql_proc:
       target: ${{ params.target_name }}
 ```
 
+> [!NOTE]
+> The above parameter use short declarative statement. You can pass a parameter
+> type to the key of a parameter name.
+
+## Configuration
+
+```text
+
+```
+
 ## License
 
 This project was licensed under the terms of the [MIT license](LICENSE).
@@ -3,6 +3,8 @@
 [![test](https://github.com/ddeutils/ddeutil-workflow/actions/workflows/tests.yml/badge.svg?branch=main)](https://github.com/ddeutils/ddeutil-workflow/actions/workflows/tests.yml)
 [![python support version](https://img.shields.io/pypi/pyversions/ddeutil-workflow)](https://pypi.org/project/ddeutil-workflow/)
 [![size](https://img.shields.io/github/languages/code-size/ddeutils/ddeutil-workflow)](https://github.com/ddeutils/ddeutil-workflow)
+[![gh license](https://img.shields.io/github/license/ddeutils/ddeutil-workflow)](https://github.com/ddeutils/ddeutil-workflow/blob/main/LICENSE)
+
 
 **Table of Contents**:
 
@@ -11,10 +13,11 @@
   - [Connection](#connection)
   - [Dataset](#dataset)
   - [Schedule](#schedule)
-- [Examples](#examples)
-  - [Python](#python)
+- [Pipeline Examples](#examples)
+  - [Python & Shell](#python--shell)
   - [Tasks (EL)](#tasks-extract--load)
-  - [Hooks (T)](#hooks-transform)
+  - [Hooks (T)](#tasks-transform)
+- [Configuration](#configuration)
 
 This **Utility Workflow** objects was created for easy to make a simple metadata
 driven pipeline that able to **ETL, T, EL, or ELT** by `.yaml` file.
@@ -45,7 +48,7 @@ This project need `ddeutil-io`, `ddeutil-model` extension namespace packages.
 
 The first step, you should start create the connections and datasets for In and
 Out of you data that want to use in pipeline of workflow. Some of this component
-is similar component of the **Airflow** because I like it concepts.
+is similar component of the **Airflow** because I like it orchestration concepts.
 
 The main feature of this project is the `Pipeline` object that can call any
 registries function. The pipeline can handle everything that you want to do, it
@@ -56,44 +59,7 @@ will passing parameters and catching the output for re-use it to next step.
 > dynamic registries instead of main features because it have a lot of maintain
 > vendor codes and deps. (I do not have time to handle this features)
 
-### Connection
-
-The connection for worker able to do any thing.
-
-```yaml
-conn_postgres_data:
-  type: conn.Postgres
-  url: 'postgres//username:${ENV_PASS}@hostname:port/database?echo=True&time_out=10'
-```
-
-```python
-from ddeutil.workflow.conn import Conn
-
-conn = Conn.from_loader(name='conn_postgres_data', externals={})
-assert conn.ping()
-```
-
-### Dataset
-
-The dataset is define any objects on the connection. This feature was implemented
-on `/vendors` because it has a lot of tools that can interact with any data systems
-in the data tool stacks.
-
-```yaml
-ds_postgres_customer_tbl:
-  type: dataset.PostgresTbl
-  conn: 'conn_postgres_data'
-  features:
-    id: serial primary key
-    name: varchar( 100 ) not null
-```
-
-```python
-from ddeutil.workflow.vendors.pg import PostgresTbl
-
-dataset = PostgresTbl.from_loader(name='ds_postgres_customer_tbl', externals={})
-assert dataset.exists()
-```
+---
 
 ### Schedule
 
@@ -104,7 +70,7 @@ schd_for_node:
 ```
 
 ```python
-from ddeutil.workflow.schedule import Schedule
+from ddeutil.workflow.on import Schedule
 
 scdl = Schedule.from_loader(name='schd_for_node', externals={})
 assert '*/5 * * * *' == str(scdl.cronjob)
@@ -117,18 +83,35 @@ assert '2022-01-01 00:20:00' f"{cron_iterate.next:%Y-%m-%d %H:%M:%S}"
 assert '2022-01-01 00:25:00' f"{cron_iterate.next:%Y-%m-%d %H:%M:%S}"
 ```
 
+---
+
+### Pipeline
+
+```yaml
+run_py_local:
+  type: ddeutil.workflow.pipeline.Pipeline
+  ...
+```
+
+```python
+from ddeutil.workflow.pipeline import Pipeline
+
+pipe = Pipeline.from_loader(name='run_py_local', externals={})
+pipe.execute(params={'author-run': 'Local Workflow', 'run-date': '2024-01-01'})
+```
+
 ## Examples
 
 This is examples that use workflow file for running common Data Engineering
 use-case.
 
-### Python
+### Python & Shell
 
 The state of doing lists that worker should to do. It be collection of the stage.
 
 ```yaml
 run_py_local:
-  type: ddeutil.workflow.pipe.Pipeline
+  type: ddeutil.workflow.pipeline.Pipeline
   params:
     author-run:
       type: str
@@ -159,6 +142,12 @@ run_py_local:
             echo: ${{ stages.define-func.outputs.echo }}
           run: |
             echo('Caller')
+    second-job:
+      stages:
+        - name: Echo Shell Script
+          id: shell-echo
+          shell: |
+            echo "Hello World from Shell"
 ```
 
 ```python
@@ -172,13 +161,16 @@ pipe.execute(params={'author-run': 'Local Workflow', 'run-date': '2024-01-01'})
 > Hello Local Workflow
 > Receive x from above with Local Workflow
 > Hello Caller
+> Hello World from Shell
 ```
 
+---
+
 ### Tasks (Extract & Load)
 
 ```yaml
 pipe_el_pg_to_lake:
-  type: ddeutil.workflow.pipe.Pipeline
+  type: ddeutil.workflow.pipeline.Pipeline
   params:
     run-date:
       type: datetime
@@ -201,11 +193,15 @@ pipe_el_pg_to_lake:
           endpoint: "/${{ params.name }}"
 ```
 
+---
+
 ### Tasks (Transform)
 
+> I recommend you to use task for all actions that you want to do.
+
 ```yaml
 pipe_hook_mssql_proc:
-  type: ddeutil.workflow.pipe.Pipeline
+  type: ddeutil.workflow.pipeline.Pipeline
   params:
     run_date: datetime
     sp_name: str
@@ -226,6 +222,16 @@ pipe_hook_mssql_proc:
      target: ${{ params.target_name }}
 ```
 
+> [!NOTE]
+> The above parameter use short declarative statement. You can pass a parameter
+> type to the key of a parameter name.
+
+## Configuration
+
+```text
+
+```
+
 ## License
 
 This project was licensed under the terms of the [MIT license](LICENSE).
@@ -12,8 +12,7 @@ keywords = ['data', 'workflow', 'utility', 'pipeline']
 classifiers = [
     "Topic :: Utilities",
     "Natural Language :: English",
-    "Development Status :: 3 - Alpha",
-    # "Development Status :: 4 - Beta",
+    "Development Status :: 4 - Beta",
     # "Development Status :: 5 - Production/Stable",
     "Intended Audience :: Developers",
     "Operating System :: OS Independent",
@@ -28,7 +27,8 @@ requires-python = ">=3.9.13"
 dependencies = [
     "fmtutil",
     "ddeutil-io",
-    "python-dotenv",
+    "python-dotenv==1.0.1",
+    "schedule==1.2.2",
 ]
 dynamic = ["version"]
 
@@ -36,21 +36,6 @@ dynamic = ["version"]
 Homepage = "https://github.com/ddeutils/ddeutil-workflow/"
 "Source Code" = "https://github.com/ddeutils/ddeutil-workflow/"
 
-[project.optional-dependencies]
-test = [
-    "sqlalchemy==2.0.30",
-    # SFTP
-    "paramiko==3.4.0",
-    "sshtunnel==0.4.0",
-    # AWS Client
-    "boto3==1.34.117",
-    # Open files
-    "fsspec==2024.5.0",
-    # Polars
-    "polars==0.20.31",
-    "pyarrow==16.1.0",
-]
-
 [tool.setuptools.dynamic]
 version = {attr = "ddeutil.workflow.__about__.__version__"}
 
@@ -80,8 +65,8 @@ addopts = [
 ]
 filterwarnings = ["error"]
 log_cli = true
-log_cli_level = "INFO"
-log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s)"
+log_cli_level = "DEBUG"
+log_cli_format = "%(asctime)s [%(levelname)7s] %(message)s (%(filename)s:%(lineno)s)"
 log_cli_date_format = "%Y-%m-%d %H:%M:%S"
 
 [tool.black]
@@ -0,0 +1 @@
+__version__: str = "0.0.5"
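
For orientation: the scheduler module is renamed in this release (`ddeutil_workflow-0.0.3/src/ddeutil/workflow/schedule.py` is removed and `ddeutil_workflow-0.0.5/src/ddeutil/workflow/on.py` is added), so the README import moves from `ddeutil.workflow.schedule` to `ddeutil.workflow.on`. Below is a minimal sketch assembled from the README fragments visible in the hunks above; the `schd_for_node` YAML shown in the comments and the exact `generate` signature are assumptions, not verified against the 0.0.5 source.

```python
# Sketch of 0.0.5 scheduler usage, pieced together from the README fragments
# in this diff. Assumes a `schd_for_node` entry is discoverable by the loader,
# e.g. (hypothetical config file on the loader search path):
#   schd_for_node:
#     type: schedule.Schedule
#     cron: '*/5 * * * *'
from ddeutil.workflow.on import Schedule

scdl = Schedule.from_loader(name='schd_for_node', externals={})
assert str(scdl.cronjob) == '*/5 * * * *'

# Per the README asserts (00:20, then 00:25), `generate` returns a cron
# iterator whose `.next` advances by one cron interval on each access.
cron_iterate = scdl.generate('2022-01-01 00:00:00')
assert f"{cron_iterate.next:%Y-%m-%d %H:%M:%S}" == '2022-01-01 00:05:00'
assert f"{cron_iterate.next:%Y-%m-%d %H:%M:%S}" == '2022-01-01 00:10:00'
```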