ddeutil-workflow 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,185 @@
+ from typing import Any, Optional
+
+ try:
+     import boto3
+     import botocore.exceptions
+ except ImportError:
+     raise ImportError(
+         "Please install the boto3 package if you want to use this boto3 "
+         "wrapper object:\n\t\t$ pip install boto3"
+     ) from None
+
+
+ class WrapBoto3:
+     """Wrapped boto3 client.
+
+     For the difference in boto3 between resource, client, and session, see:
+     https://stackoverflow.com/questions/42809096/
+     difference-in-boto3-between-resource-client-and-session
+
+     .. config::
+
+         ~/.aws/credentials
+
+             [my-user]
+             aws_access_key_id = AKIAxxx
+             aws_secret_access_key = xxx
+
+             [my-role]
+             source_profile = my-user
+             role_arn = arn:aws:iam::123456789012:role/the-role
+
+         ~/.aws/config
+
+             [profile my-role]
+             region = ap-southeast-2
+     """
+
+     def __init__(
+         self,
+         access_key_id: str,
+         secret_access_key: str,
+         region_name: Optional[str] = None,
+         *,
+         role_session_name: Optional[str] = None,
+         role_arn: Optional[str] = None,
+         mfa_serial: Optional[str] = None,
+     ):
+         self.access_key_id = access_key_id
+         self.secret_access_key = secret_access_key
+         self.region_name: str = region_name or "ap-southeast-1"
+
+         # NOTE: Optional values for the assume-role session.
+         self.role_session_name: str = role_session_name or "AssumeRoleSession"
+         self.role_arn = role_arn
+         self.mfa_serial = mfa_serial
+
+         # NOTE: Create the credentials from the values above.
+         self.cred = self.make_cred()
+
+     def make_cred(self) -> dict[str, str]:
+         if self.role_arn is None:
+             return {
+                 "AccessKeyId": self.access_key_id,
+                 "SecretAccessKey": self.secret_access_key,
+             }
+         # NOTE: A low-level client representing AWS Security Token Service (STS)
+         # >>> sess = boto3.session.Session(
+         # ...     aws_access_key_id=ARN_ACCESS_KEY,
+         # ...     aws_secret_access_key=ARN_SECRET_KEY,
+         # ... )
+         # >>> sts_client = sess.client('sts')
+         sts_client = boto3.client(
+             service_name="sts",
+             region_name=self.region_name,
+             aws_access_key_id=self.access_key_id,
+             aws_secret_access_key=self.secret_access_key,
+         )
+         mfa_optional: dict[str, str] = {}
+         if self.mfa_serial:
+             mfa_otp: str = str(input("Enter the MFA code: "))
+             mfa_optional = {"SerialNumber": self.mfa_serial, "TokenCode": mfa_otp}
+         assumed_role = sts_client.assume_role(
+             RoleArn=self.role_arn,
+             RoleSessionName=self.role_session_name,
+             DurationSeconds=3600,
+             **mfa_optional,
+         )
+         # NOTE: From the response that contains the assumed role, get the
+         #   temporary credentials that can be used to make subsequent API calls.
+         return assumed_role["Credentials"]
+
+     @property
+     def session(self):
+         """Return a boto3 session that can be used like:
+         ``s3 = self.session.client('s3')``
+         ``s3 = self.session.resource('s3')``
+         """
+         return boto3.session.Session(
+             aws_access_key_id=self.cred["AccessKeyId"],
+             aws_secret_access_key=self.cred["SecretAccessKey"],
+             aws_session_token=self.cred.get("SessionToken"),
+         )
+
+     @property
+     def s3(self):
+         return boto3.client(
+             service_name="s3",
+             region_name=self.region_name,
+             aws_access_key_id=self.cred["AccessKeyId"],
+             aws_secret_access_key=self.cred["SecretAccessKey"],
+             aws_session_token=self.cred.get("SessionToken"),
+         )
+
+     def list_objects(self, bucket: str, prefix: str):
+         objs: list[dict[str, Any]] = []
+         kwargs = {"Bucket": bucket, "Prefix": prefix}
+         while True:
+             resp = self.s3.list_objects_v2(**kwargs)
+             # NOTE: ``Contents`` is absent when the prefix matches nothing.
+             for obj in resp.get("Contents", []):
+                 objs.append(obj)
+             try:
+                 kwargs["ContinuationToken"] = resp["NextContinuationToken"]
+             except KeyError:
+                 break
+         return objs
+
+     def paginate(
+         self,
+         bucket: str,
+         prefix: str,
+         *,
+         marker: Optional[str] = None,
+         search: Optional[str] = None,
+     ):
+         """Paginate over the objects in the bucket with the given prefix.
+
+         .. docs:
+             - https://boto3.amazonaws.com/v1/documentation/api/latest/
+               guide/paginators.html
+
+         .. search::
+             - "Contents[?Size > `100`][]"
+             - "Contents[?contains(LastModified, `'"2022-01-01"'`)]"
+             - "Contents[?LastModified>=`YYYY-MM-DD`].Key"
+             - "DeleteMarkers[?LastModified>=`2020-07-07T00:00:00`
+               && IsLatest==`true`].[Key,VersionId]"
+         """
+         paginator = self.s3.get_paginator("list_objects_v2")
+         page_iterator = paginator.paginate(
+             Bucket=bucket,
+             Prefix=prefix,
+             PaginationConfig={
+                 # 'MaxItems': 10,
+                 "PageSize": 10,
+                 "StartingToken": marker,
+             },
+         )
+
+         for page in page_iterator:
+             print("# This is new page")
+             print("Contents Count:", len(page["Contents"]))
+             if "NextContinuationToken" in page.keys():
+                 print(page["NextContinuationToken"])
+
+         # filtered_iterator = page_iterator.search("Contents[?Size > `100`][]")
+         # for key_data in filtered_iterator:
+         #     print(key_data)
+
+         # page_iterator = paginator.paginate(
+         #     Bucket=bucket,
+         #     Prefix=prefix,
+         #     PaginationConfig={
+         #         'MaxItems': 10,
+         #         'PageSize': 10,
+         #         'StartingToken': marker
+         #     }
+         # )
+
+     def exists(self, bucket: str, prefix: str) -> bool:
+         try:
+             self.s3.head_object(Bucket=bucket, Key=prefix)
+             return True
+         except botocore.exceptions.ClientError as err:
+             # NOTE: Only a 404 response means the object does not exist;
+             #   re-raise any other client error such as access denied.
+             if err.response["Error"]["Code"] == "404":
+                 return False
+             raise
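
A minimal usage sketch of the `WrapBoto3` wrapper above (not part of the package itself); the import path, key values, bucket, and prefix are placeholders:

```python
# Hypothetical usage; the import path, keys, and bucket names are placeholders.
from ddeutil.workflow.vendors.aws import WrapBoto3

boto = WrapBoto3(access_key_id="AKIAxxx", secret_access_key="xxx")

# Check a single object, then list everything under a prefix.
print(boto.exists(bucket="my-bucket", prefix="landing/customer.csv"))
for obj in boto.list_objects(bucket="my-bucket", prefix="landing/"):
    print(obj["Key"], obj["Size"])
```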
@@ -0,0 +1,11 @@
+ class WrapMinio:
+
+     def __init__(
+         self,
+         host: str,
+         access_key: str,
+         secret_access_key: str,
+     ):
+         self.host: str = host
+         self.access_key: str = access_key
+         self.secret_access_key: str = secret_access_key
@@ -0,0 +1,209 @@
+ import contextlib
+ from collections import deque
+ from collections.abc import Generator, Iterator
+ from ftplib import FTP
+ from stat import S_ISDIR, S_ISREG
+ from typing import Optional
+
+ try:
+     import paramiko
+     from paramiko import SFTPAttributes, SFTPClient
+     from sshtunnel import BaseSSHTunnelForwarderError, SSHTunnelForwarder
+ except ImportError:
+     raise ImportError(
+         "Please install the paramiko and sshtunnel packages before using:\n\t\t"
+         "$ pip install paramiko sshtunnel"
+     ) from None
+
+
+ class WrapFTP:
+     """Wrapped FTP client.
+
+     FTP (File Transfer Protocol), the standard for transferring files
+     between a client and a server, is one of the most popular formats.
+
+     Its disadvantage is that the data sent and received is not encrypted,
+     so there is an opportunity for a third party to read the information
+     during the transfer.
+     """
+
+     def __init__(
+         self,
+         host: str,
+         user: str,
+         pwd: str,
+         port: int = 21,
+     ):
+         self.host: str = host
+         self.port: int = port
+         self.user: str = user
+         self.pwd: str = pwd
+
+     def ftp_connect(self):
+         return FTP(
+             host=self.host,
+             user=self.user,
+             passwd=self.pwd,
+         )
+
+
+ class WrapSFTP:
+     """Wrapped SFTP client.
+
+     SFTP (Secure File Transfer Protocol) is a standard that helps increase
+     security in file transfers. By encrypting data and commands with SSH
+     (Secure Shell) before transferring files between the client and server,
+     we can be confident that uploads and downloads are done safely.
+
+     The data cannot be accessed by third parties, and even if it is
+     obtained, it is encrypted and cannot be read.
+
+     See-Also:
+
+         This object wraps the [Paramiko](https://www.paramiko.org/) package
+         with my connection interface.
+     """
+
+     def __init__(
+         self,
+         host: str,
+         user: Optional[str] = None,
+         port: Optional[int] = None,
+         *,
+         pwd: Optional[str] = None,
+         private_key: Optional[str] = None,
+         private_key_password: Optional[str] = None,
+     ) -> None:
+         self.host: str = host
+         self.user: str = user or ""
+         self.port: int = port or 22
+         self.pwd: Optional[str] = pwd
+
+         # NOTE: Private key path like ``/home/user/.ssh/id_rsa``.
+         self.private_key = private_key
+
+         # NOTE: Passphrase for the private key, if it has one.
+         self.private_key_pwd = private_key_password
+
+     def get(self, remote_path, local_path):
+         with self.transport_client() as sftp:
+             sftp.get(remote_path, local_path)
+
+     def put(self, remote_path, local_path):
+         with self.transport_client() as sftp:
+             # NOTE: Paramiko expects the local path first when uploading.
+             sftp.put(localpath=local_path, remotepath=remote_path)
+
+     def rm(self, remote_path: str):
+         with self.transport_client() as sftp:
+             sftp.remove(remote_path)
+
+     def mkdir(self, remote_path: str):
+         with self.transport_client() as sftp:
+             sftp.mkdir(remote_path)
+
+     @contextlib.contextmanager
+     def ssh_tunnel(self) -> Iterator:
+         try:
+             with SSHTunnelForwarder(
+                 (self.host, self.port),
+                 ssh_username=self.user,
+                 ssh_password=self.pwd,
+                 ssh_pkey=self.private_key,
+                 ssh_private_key_password=self.private_key_pwd,
+                 local_bind_address=("0.0.0.0", 22),
+                 # NOTE: Use a suitable remote_bind_address that is able to be
+                 #   the DB host on that SSH server.
+                 remote_bind_address=("127.0.0.1", self.port),
+             ) as tunnel:
+                 tunnel.check_tunnels()
+                 client = paramiko.SSHClient()
+                 if self.private_key:
+                     client.load_system_host_keys()
+                 # NOTE: Add the SSH key to the known_hosts file.
+                 client.set_missing_host_key_policy(
+                     paramiko.MissingHostKeyPolicy()
+                 )
+
+                 # NOTE: Start connecting to the SSH server.
+                 client.connect(
+                     "127.0.0.1",
+                     port=tunnel.local_bind_port,
+                     **(
+                         {
+                             "username": self.user,
+                             "password": self.pwd,
+                             "allow_agent": False,
+                             "look_for_keys": False,
+                             "banner_timeout": 20,
+                         }
+                         if self.pwd
+                         else {}
+                     ),
+                 )
+                 with client.open_sftp() as sftp:
+                     yield sftp
+                 client.close()
+         except BaseSSHTunnelForwarderError as err:
+             raise ValueError(
+                 "This config data does not connect to the server"
+             ) from err
+
+     @contextlib.contextmanager
+     def transport_client(self) -> Generator[SFTPClient, None, None]:
+         with paramiko.Transport(sock=(self.host, self.port)) as transport:
+             transport.connect(
+                 hostkey=None,
+                 username=self.user,
+                 password=self.pwd,
+             )
+             with paramiko.SFTPClient.from_transport(transport) as sftp:
+                 yield sftp
+
+     @contextlib.contextmanager
+     def simple_client(self) -> Generator[SFTPClient, None, None]:
+         client = paramiko.SSHClient()
+         client.set_missing_host_key_policy(paramiko.MissingHostKeyPolicy())
+         client.connect(
+             hostname=self.host,
+             port=self.port,
+             username=self.user,
+             password=self.pwd,
+         )
+         with client.open_sftp() as sftp:
+             yield sftp
+         client.close()
+
+     def glob(self, pattern: str) -> Iterator[str]:
+         with self.transport_client() as sftp:
+             try:
+                 # NOTE: List files matching the pattern on the SFTP server.
+                 f: SFTPAttributes
+                 for f in sftp.listdir_attr(pattern):
+                     yield pattern + f.filename
+             except FileNotFoundError:
+                 raise FileNotFoundError(
+                     f"Pattern {pattern!r} was not found on the SFTP server"
+                 ) from None
+
+     def walk(self, pattern: str) -> Iterator[str]:
+         dirs: deque = deque([pattern])
+         with self.transport_client() as sftp:
+             while len(dirs) > 0:
+                 d: str = dirs.popleft()
+                 f: SFTPAttributes
+                 for f in sftp.listdir_attr(d):
+                     rs: str = (
+                         (d + f.filename) if d == "/" else (d + "/" + f.filename)
+                     )
+                     if S_ISDIR(f.st_mode):
+                         dirs.append(rs)
+                     elif S_ISREG(f.st_mode):
+                         yield rs
+
+     @staticmethod
+     def isdir(path: SFTPAttributes):
+         try:
+             return S_ISDIR(path.st_mode)
+         except OSError:
+             # NOTE: Path does not exist, so by definition it is not a directory.
+             return False
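
A minimal usage sketch of `WrapSFTP` (not part of the package itself); the import path, host, credentials, and paths are placeholders:

```python
# Hypothetical usage; the import path, host, credentials, and paths are placeholders.
from ddeutil.workflow.vendors.sftp import WrapSFTP

sftp = WrapSFTP(host="sftp.example.com", user="deploy", pwd="secret")

# Download one remote file with the Transport-based client.
sftp.get("/data/outbound/customer.csv", "./customer.csv")

# Walk the remote tree and print every regular file found under /data.
for path in sftp.walk("/data"):
    print(path)
```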
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 Data Developer & Engineering
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -0,0 +1,251 @@
+ Metadata-Version: 2.1
+ Name: ddeutil-workflow
+ Version: 0.0.1
+ Summary: Data Developer & Engineer Workflow Utility Objects
+ Author-email: ddeutils <korawich.anu@gmail.com>
+ License: MIT
+ Project-URL: Homepage, https://github.com/ddeutils/ddeutil-workflow/
+ Project-URL: Source Code, https://github.com/ddeutils/ddeutil-workflow/
+ Keywords: data,workflow,utility,pipeline
+ Classifier: Topic :: Utilities
+ Classifier: Natural Language :: English
+ Classifier: Development Status :: 3 - Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3 :: Only
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Requires-Python: >=3.9.13
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: fmtutil
+ Requires-Dist: ddeutil-io
+ Requires-Dist: ddeutil-model
+ Requires-Dist: python-dotenv
+ Provides-Extra: test
+ Requires-Dist: sqlalchemy ==2.0.30 ; extra == 'test'
+ Requires-Dist: paramiko ==3.4.0 ; extra == 'test'
+ Requires-Dist: sshtunnel ==0.4.0 ; extra == 'test'
+ Requires-Dist: boto3 ==1.34.109 ; extra == 'test'
+ Requires-Dist: fsspec ==2024.5.0 ; extra == 'test'
+ Requires-Dist: polars ==0.20.26 ; extra == 'test'
+ Requires-Dist: pyarrow ==16.1.0 ; extra == 'test'
+
+ # Data Utility: _Workflow_
+
+ [![test](https://github.com/ddeutils/ddeutil-workflow/actions/workflows/tests.yml/badge.svg?branch=main)](https://github.com/ddeutils/ddeutil-workflow/actions/workflows/tests.yml)
+ [![python support version](https://img.shields.io/pypi/pyversions/ddeutil-workflow)](https://pypi.org/project/ddeutil-workflow/)
+ [![size](https://img.shields.io/github/languages/code-size/ddeutils/ddeutil-workflow)](https://github.com/ddeutils/ddeutil-workflow)
+
+ **Table of Contents**:
+
+ - [Installation](#installation)
+ - [Getting Started](#getting-started)
+   - [Connection](#connection)
+   - [Dataset](#dataset)
+   - [Schedule](#schedule)
+ - [Examples](#examples)
+   - [Python](#python)
+   - [Tasks (EL)](#tasks-extract--load)
+   - [Hooks (T)](#hooks-transform)
+
+ This **Utility Workflow** object was created to make it easy to build a simple,
+ metadata-driven pipeline that can **ETL, T, EL, or ELT** from a `.yaml` file.
+
+ I think we should not create multiple pipelines per use-case if we are able to
+ write one dynamic pipeline that just changes its input parameters per use-case
+ instead. This way we can handle a lot of pipelines in our organization with
+ metadata only. This is called **Metadata Driven**.
+
+ Next, we should add some monitoring tools to manage the logging that returns
+ from pipeline runs, because a run by itself does not show us which use-case the
+ data pipeline is running for.
+
+ > [!NOTE]
+ > _Disclaimer_: The dynamic statements are inspired by GitHub Actions `.yml` files,
+ > and all of the config files are inspired by several data orchestration framework
+ > tools from my experience as a Data Engineer.
+
+ ## Installation
+
+ ```shell
+ pip install ddeutil-workflow
+ ```
+
+ This project needs the `ddeutil-io` and `ddeutil-model` extension namespace packages.
+
+ ## Getting Started
+
+ As the first step, you should create the connections and datasets for the input
+ and output of the data that you want to use in a workflow pipeline. Some of
+ these components are similar to **Airflow** components because I like its concepts.
+
+ ### Connection
+
+ The connection that a worker uses to be able to do anything.
+
+ ```yaml
+ conn_postgres_data:
+   type: conn.Postgres
+   url: 'postgres://username:${ENV_PASS}@hostname:port/database?echo=True&time_out=10'
+ ```
+
+ ```python
+ from ddeutil.workflow.conn import Conn
+
+ conn = Conn.from_loader(name='conn_postgres_data', externals={})
+ assert conn.ping()
+ ```
+
+ ### Dataset
+
+ A dataset defines any object on its connection.
+
+ ```yaml
+ ds_postgres_customer_tbl:
+   type: dataset.PostgresTbl
+   conn: 'conn_postgres_data'
+   features:
+     id: serial primary key
+     name: varchar( 100 ) not null
+ ```
+
+ ```python
+ from ddeutil.workflow.dataset import PostgresTbl
+
+ dataset = PostgresTbl.from_loader(name='ds_postgres_customer_tbl', externals={})
+ assert dataset.exists()
+ ```
+
+ ### Schedule
+
+ ```yaml
+ schd_for_node:
+   type: schedule.Scdl
+   cron: "*/5 * * * *"
+ ```
+
+ ```python
+ from ddeutil.workflow.schedule import Scdl
+
+ scdl = Scdl.from_loader(name='schd_for_node', externals={})
+ assert '*/5 * * * *' == str(scdl.cronjob)
+
+ cron_iterate = scdl.generate('2022-01-01 00:00:00')
+ assert '2022-01-01 00:05:00' == f"{cron_iterate.next:%Y-%m-%d %H:%M:%S}"
+ assert '2022-01-01 00:10:00' == f"{cron_iterate.next:%Y-%m-%d %H:%M:%S}"
+ assert '2022-01-01 00:15:00' == f"{cron_iterate.next:%Y-%m-%d %H:%M:%S}"
+ assert '2022-01-01 00:20:00' == f"{cron_iterate.next:%Y-%m-%d %H:%M:%S}"
+ assert '2022-01-01 00:25:00' == f"{cron_iterate.next:%Y-%m-%d %H:%M:%S}"
+ ```
+
+ ## Examples
+
+ These are examples that use a workflow file for running common Data Engineering
+ use-cases.
+
+ ### Python
+
+ This pipeline states the list of things that a worker should do; it is a
+ collection of stages.
+
+ ```yaml
+ run_py_local:
+   type: ddeutil.workflow.pipe.Pipeline
+   params:
+     author-run: utils.receive.string
+     run-date: utils.receive.datetime
+   jobs:
+     first-job:
+       stages:
+         - name: Printing Information
+           id: define-func
+           run: |
+             x = '${{ params.author-run }}'
+             print(f'Hello {x}')
+
+             def echo(name: str):
+                 print(f'Hello {name}')
+
+         - name: Run Sequence and use var from Above
+           vars:
+             x: ${{ params.author-run }}
+           run: |
+             print(f'Receive x from above with {x}')
+             # Change x value
+             x: int = 1
+
+         - name: Call Function
+           vars:
+             echo: ${{ stages.define-func.outputs.echo }}
+           run: |
+             echo('Caller')
+ ```
+
+ ```python
+ from ddeutil.workflow.pipeline import Pipeline
+
+ pipe = Pipeline.from_loader(name='run_py_local', externals={})
+ pipe.execute(params={'author-run': 'Local Workflow', 'run-date': '2024-01-01'})
+ ```
+
+ ```shell
+ > Hello Local Workflow
+ > Receive x from above with Local Workflow
+ > Hello Caller
+ ```
+
+ ### Tasks (Extract & Load)
+
+ ```yaml
+ pipe_el_pg_to_lake:
+   type: ddeutil.workflow.pipe.Pipeline
+   params:
+     run-date: utils.receive.datetime
+     author-email: utils.receive.string
+   jobs:
+     extract-load:
+       stages:
+         - name: "Extract Load from Postgres to Lake"
+           id: extract
+           task: tasks/postgres-to-delta@polars
+           with:
+             source:
+               conn: conn_postgres_url
+               query: |
+                 select * from ${{ params.name }}
+                 where update_date = '${{ params.datetime }}'
+             sink:
+               conn: conn_az_lake
+               endpoint: "/${{ params.name }}"
+ ```
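+
+ A minimal sketch of running this pipeline, assuming the same
+ `Pipeline.from_loader` pattern shown in the Python example above (the
+ parameter values here are placeholders):
+
+ ```python
+ from ddeutil.workflow.pipeline import Pipeline
+
+ pipe = Pipeline.from_loader(name='pipe_el_pg_to_lake', externals={})
+ pipe.execute(params={'run-date': '2024-01-01', 'author-email': 'me@example.com'})
+ ```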
+
+ ### Hooks (Transform)
+
+ ```yaml
+ pipe_hook_mssql_proc:
+   type: ddeutil.workflow.pipe.Pipeline
+   params:
+     run_date: utils.receive.datetime
+     sp_name: utils.receive.string
+     source_name: utils.receive.string
+     target_name: utils.receive.string
+   jobs:
+     transform:
+       stages:
+         - name: "Transform Data in MS SQL Server"
+           hook: hooks/mssql-proc@odbc
+           with:
+             exec: ${{ params.sp_name }}
+             params:
+               run_mode: "T"
+               run_date: ${{ params.run_date }}
+               source: ${{ params.source_name }}
+               target: ${{ params.target_name }}
+ ```
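+
+ Likewise, a minimal sketch of running the hook pipeline, assuming the same
+ loader interface (the stored-procedure and table names are placeholders):
+
+ ```python
+ from ddeutil.workflow.pipeline import Pipeline
+
+ pipe = Pipeline.from_loader(name='pipe_hook_mssql_proc', externals={})
+ pipe.execute(params={
+     'run_date': '2024-01-01',
+     'sp_name': 'proc-transform-customer',
+     'source_name': 'staging.customer',
+     'target_name': 'mart.customer',
+ })
+ ```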
+
+ ## License
+
+ This project is licensed under the terms of the [MIT license](LICENSE).