ddeutil-workflow 0.0.3__py3-none-any.whl → 0.0.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/{vendors/__schedule.py → __scheduler.py} +153 -135
- ddeutil/workflow/loader.py +9 -1
- ddeutil/workflow/on.py +143 -0
- ddeutil/workflow/pipeline.py +102 -46
- ddeutil/workflow/tasks/__init__.py +1 -1
- ddeutil/workflow/tasks/dummy.py +52 -0
- ddeutil/workflow/utils.py +33 -5
- {ddeutil_workflow-0.0.3.dist-info → ddeutil_workflow-0.0.5.dist-info}/METADATA +57 -58
- ddeutil_workflow-0.0.5.dist-info/RECORD +17 -0
- {ddeutil_workflow-0.0.3.dist-info → ddeutil_workflow-0.0.5.dist-info}/WHEEL +1 -1
- ddeutil/workflow/conn.py +0 -240
- ddeutil/workflow/schedule.py +0 -82
- ddeutil/workflow/tasks/_pandas.py +0 -54
- ddeutil/workflow/tasks/_polars.py +0 -92
- ddeutil/workflow/vendors/__dataset.py +0 -127
- ddeutil/workflow/vendors/__dict.py +0 -333
- ddeutil/workflow/vendors/__init__.py +0 -0
- ddeutil/workflow/vendors/aws.py +0 -185
- ddeutil/workflow/vendors/az.py +0 -0
- ddeutil/workflow/vendors/minio.py +0 -11
- ddeutil/workflow/vendors/pd.py +0 -13
- ddeutil/workflow/vendors/pg.py +0 -11
- ddeutil/workflow/vendors/pl.py +0 -172
- ddeutil/workflow/vendors/sftp.py +0 -209
- ddeutil_workflow-0.0.3.dist-info/RECORD +0 -29
- {ddeutil_workflow-0.0.3.dist-info → ddeutil_workflow-0.0.5.dist-info}/LICENSE +0 -0
- {ddeutil_workflow-0.0.3.dist-info → ddeutil_workflow-0.0.5.dist-info}/top_level.txt +0 -0
ddeutil/workflow/vendors/aws.py
DELETED
@@ -1,185 +0,0 @@
-from typing import Any, Optional
-
-try:
-    import boto3
-    import botocore.exceptions
-except ImportError:
-    raise ImportError(
-        "Please install boto3 package if you want to use the boto wrapped object.\n\t\t"
-        "$ pip install boto3"
-    ) from None
-
-
-class WrapBoto3:
-    """Difference in boto3 between resource, client, and session
-    docs: https://stackoverflow.com/questions/42809096/
-        difference-in-boto3-between-resource-client-and-session
-
-    .. config::
-
-        ~/.aws/credentials
-
-        [my-user]
-        aws_access_key_id = AKIAxxx
-        aws_secret_access_key = xxx
-
-        [my-role]
-        source_profile = my-user
-        role_arn = arn:aws:iam::123456789012:role/the-role
-
-        ~/.aws/config
-
-        [profile my-role]
-        region = ap-southeast-2
-    """
-
-    def __init__(
-        self,
-        access_key_id: str,
-        secret_access_key: str,
-        region_name: Optional[str] = None,
-        *,
-        role_session_name: Optional[str] = None,
-        role_arn: Optional[str] = None,
-        mfa_serial: Optional[str] = None,
-    ):
-        self.access_key_id = access_key_id
-        self.secret_access_key = secret_access_key
-        self.region_name: str = region_name or "ap-southeast-1"
-
-        # Optional for session.
-        self.role_session_name: str = role_session_name or "AssumeRoleSession"
-        self.role_arn = role_arn
-        self.mfa_serial = mfa_serial
-
-        # Create credential
-        self.cred = self.make_cred()
-
-    def make_cred(self) -> dict[str, str]:
-        if self.role_arn is None:
-            return {
-                "AccessKeyId": self.access_key_id,
-                "SecretAccessKey": self.secret_access_key,
-            }
-        # NOTE: A low-level client representing AWS Security Token Service (STS)
-        # >>> sess = boto3.session.Session(
-        # ...     aws_access_key_id=ARN_ACCESS_KEY,
-        # ...     aws_secret_access_key=ARN_SECRET_KEY
-        # ... )
-        # >>> sts_client = sess.client('sts')
-        sts_client = boto3.client(
-            service_name="sts",
-            region_name=self.region_name,
-            aws_access_key_id=self.access_key_id,
-            aws_secret_access_key=self.secret_access_key,
-        )
-        mfa_optional: dict[str, str] = {}
-        if self.mfa_serial:
-            mfa_otp: str = str(input("Enter the MFA code: "))
-            mfa_optional = {"SerialNumber": self.mfa_serial, "TokenCode": mfa_otp}
-        assumed_role = sts_client.assume_role(
-            RoleArn=self.role_arn,
-            RoleSessionName=self.role_session_name,
-            DurationSeconds=3600,
-            **mfa_optional,
-        )
-        # NOTE: From the response that contains the assumed role, get the
-        # temporary credentials that can be used to make subsequent API calls
-        return assumed_role["Credentials"]
-
-    @property
-    def session(self):
-        """Can use by
-        ``s3 = self.session.client('s3')``
-        ``s3 = self.session.resource('s3')``
-        """
-        return boto3.session.Session(
-            aws_access_key_id=self.cred["AccessKeyId"],
-            aws_secret_access_key=self.cred["SecretAccessKey"],
-            aws_session_token=self.cred.get("SessionToken"),
-        )
-
-    @property
-    def s3(self):
-        return boto3.client(
-            service_name="s3",
-            region_name=self.region_name,
-            aws_access_key_id=self.cred["AccessKeyId"],
-            aws_secret_access_key=self.cred["SecretAccessKey"],
-            aws_session_token=self.cred.get("SessionToken"),
-        )
-
-    def list_objects(self, bucket: str, prefix: str):
-        objs: list[dict[str, Any]] = []
-        kwargs = {"Bucket": bucket, "Prefix": prefix}
-        while True:
-            resp = self.s3.list_objects_v2(**kwargs)
-            for obj in resp["Contents"]:
-                objs.append(obj)
-            try:
-                kwargs["ContinuationToken"] = resp["NextContinuationToken"]
-            except KeyError:
-                break
-        return objs
-
-    def paginate(
-        self,
-        bucket: str,
-        prefix: str,
-        *,
-        marker: Optional[str] = None,
-        search: Optional[str] = None,
-    ):
-        """
-        .. docs:
-            - https://boto3.amazonaws.com/v1/documentation/api/latest/
-                guide/paginators.html
-
-        .. search::
-            - "Contents[?Size > `100`][]"
-            - "Contents[?contains(LastModified, `'"2022-01-01"'`)]"
-            - "Contents[?LastModified>=`YYYY-MM-DD`].Key"
-            - "DeleteMarkers[?LastModified>=`2020-07-07T00:00:00`
-                && IsLatest==`true`].[Key,VersionId]"
-        """
-        paginator = self.s3.get_paginator("list_objects_v2")
-        page_iterator = paginator.paginate(
-            Bucket=bucket,
-            Prefix=prefix,
-            PaginationConfig={
-                # 'MaxItems': 10,
-                "PageSize": 10,
-                "StartingToken": marker,
-            },
-        )
-
-        for page in page_iterator:
-            print("# This is new page")
-            print("Contents Count:", len(page["Contents"]))
-            if "NextContinuationToken" in page.keys():
-                print(page["NextContinuationToken"])
-
-        # filtered_iterator = page_iterator.search("Contents[?Size > `100`][]")
-        # for key_data in filtered_iterator:
-        #     print(key_data)
-
-        # page_iterator = paginator.paginate(
-        #     Bucket=bucket,
-        #     Prefix=prefix,
-        #     PaginationConfig={
-        #         'MaxItems': 10,
-        #         'PageSize': 10,
-        #         'StartingToken': marker
-        #     }
-        # )
-
-    def exists(self, bucket: str, prefix: str) -> bool:
-        try:
-            self.s3.head_object(Bucket=bucket, Key=prefix)
-            return True
-        except botocore.exceptions.ClientError as err:
-            if err.response["Error"]["Code"]:
-                return False
-            raise
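For reference, the removed WrapBoto3 wrapper was driven roughly as follows. This is a minimal sketch based on the deleted code above; the credentials, bucket, and keys are placeholders, and the import path exists only in 0.0.3 and earlier wheels:

# Minimal usage sketch of the removed WrapBoto3 (placeholder credentials).
from ddeutil.workflow.vendors.aws import WrapBoto3  # removed in 0.0.5

boto = WrapBoto3(
    access_key_id="AKIAxxx",
    secret_access_key="xxx",
    region_name="ap-southeast-1",
    # Passing role_arn makes make_cred() call STS assume_role and cache the
    # temporary credentials on self.cred instead of the raw key pair.
    role_arn="arn:aws:iam::123456789012:role/the-role",
)
if boto.exists(bucket="my-bucket", prefix="data/input.csv"):
    for obj in boto.list_objects(bucket="my-bucket", prefix="data/"):
        print(obj["Key"], obj["Size"])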
ddeutil/workflow/vendors/az.py
DELETED
File without changes
ddeutil/workflow/vendors/pd.py
DELETED
ddeutil/workflow/vendors/pg.py
DELETED
@@ -1,11 +0,0 @@
-# ------------------------------------------------------------------------------
-# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
-# Licensed under the MIT License. See LICENSE in the project root for
-# license information.
-# ------------------------------------------------------------------------------
-from __future__ import annotations
-
-from .__dataset import TblDataset
-
-
-class PostgresTbl(TblDataset): ...
ddeutil/workflow/vendors/pl.py
DELETED
@@ -1,172 +0,0 @@
-# ------------------------------------------------------------------------------
-# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
-# Licensed under the MIT License. See LICENSE in the project root for
-# license information.
-# ------------------------------------------------------------------------------
-from __future__ import annotations
-
-from datetime import datetime
-from typing import Any, Optional
-
-from fmtutil import Datetime, FormatterGroupType, make_group
-from pydantic import BaseModel, Field
-
-try:
-    import polars as pl
-except ImportError:
-    raise ImportError(
-        "Please install polars package\n\t\t$ pip install polars"
-    ) from None
-
-from ..__types import TupleStr
-from .__dataset import FlDataFrame, TblDataFrame
-
-EXCLUDED_EXTRAS: TupleStr = ("type",)
-OBJ_FMTS: FormatterGroupType = make_group({"datetime": Datetime})
-
-
-class PolarsCsvArgs(BaseModel):
-    """CSV files should use the RFC 4180 standard format.
-
-    docs: [RFC4180](https://datatracker.ietf.org/doc/html/rfc4180)
-    """
-
-    header: bool = True
-    separator: str = ","
-    skip_rows: int = 0
-    encoding: str = "utf-8"
-
-
-class PolarsCsv(FlDataFrame):
-    extras: PolarsCsvArgs
-
-    def load_options(self) -> dict[str, Any]:
-        return {
-            "has_header": self.extras.header,
-            "separator": self.extras.separator,
-            "skip_rows": self.extras.skip_rows,
-            "encoding": self.extras.encoding,
-        }
-
-    def load(
-        self,
-        _object: str | None = None,
-        options: dict[str, Any] | None = None,
-        *,
-        override: bool = False,
-    ) -> pl.DataFrame:
-        """Load CSV file to Polars DataFrame with ``read_csv`` method."""
-        return pl.read_csv(
-            f"{self.conn.get_spec()}/{_object or self.object}",
-            **(
-                (options or {})
-                if override
-                else (self.load_options() | (options or {}))
-            ),
-        )
-
-    def scan(
-        self,
-        _object: str | None = None,
-        options: dict[str, Any] | None = None,
-    ) -> pl.LazyFrame:
-        """Load CSV file to Polars LazyFrame with ``scan_csv`` method."""
-        # FIXME: Save Csv does not support for the fsspec file url.
-        return pl.scan_csv(
-            f"{self.conn.endpoint}/{_object or self.object}",
-            **(self.load_options() | (options or {})),
-        )
-
-    def save_options(self) -> dict[str, Any]:
-        return {
-            "include_header": self.extras.header,
-            "separator": self.extras.separator,
-        }
-
-    def save(
-        self,
-        df: pl.DataFrame,
-        _object: str | None = None,
-        options: dict[str, Any] | None = None,
-    ) -> None:
-        """Save Polars Dataframe to CSV file with ``write_csv`` method."""
-        # FIXME: Save Csv does not support for the fsspec file url.
-        return df.write_csv(
-            f"{self.conn.endpoint}/{_object or self.object}",
-            **(self.save_options() | (options or {})),
-        )
-
-    def sink(
-        self,
-        df: pl.LazyFrame,
-        _object: str | None = None,
-        options: dict[str, Any] | None = None,
-    ) -> None:
-        """Save Polars Dataframe to CSV file with ``sink_csv`` method."""
-        # FIXME: Save Csv does not support for the fsspec file url.
-        return df.sink_csv(
-            f"{self.conn.endpoint}/{_object or self.object}",
-            **(self.save_options() | (options or {})),
-        )
-
-
-class PolarsJson(FlDataFrame):
-
-    def load(
-        self,
-        _object: str | None = None,
-        options: dict[str, Any] | None = None,
-        *,
-        dt: str | datetime | None = None,
-    ):
-        """Load Json file to Polars Dataframe with ``read_json`` method."""
-        # FIXME: Load Json does not support for the fsspec file url.
-        return pl.read_json(
-            f"{self.conn.endpoint}/"
-            f"{self.format_object(_object or self.object, dt=dt)}",
-            **(options or {}),
-        )
-
-    def save(
-        self,
-        df: pl.DataFrame,
-        _object: str | None = None,
-        options: dict[str, Any] | None = None,
-    ): ...
-
-
-class PolarsNdJson(FlDataFrame): ...
-
-
-class PolarsParqArgs(BaseModel):
-    compression: Optional[str] = None
-    use_pyarrow: bool = False
-    pyarrow_options: dict[str, Any] = Field(default_factory=dict)
-
-
-class PolarsParq(FlDataFrame):
-    extras: PolarsParqArgs
-
-    def save_options(self):
-        excluded: list[str] = []
-        if not self.extras.pyarrow_options:
-            excluded.append("pyarrow_options")
-        return self.extras.model_dump(exclude=excluded)
-
-    def save(
-        self,
-        df: pl.DataFrame,
-        _object: str | None = None,
-        options: dict[str, Any] | None = None,
-    ):
-        print(
-            f"Start write parquet to "
-            f"{self.conn.endpoint}/{_object or self.object}"
-        )
-        return df.write_parquet(
-            f"{self.conn.endpoint}/{_object or self.object}",
-            **(self.save_options() | (options or {})),
-        )
-
-
-class PolarsPostgres(TblDataFrame): ...
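The load/save methods in this removed module all share one convention: model-level defaults from the extras args are dict-merged with per-call options, and load() can bypass the defaults entirely with override=True. A standalone sketch of that merge logic (merge_options is a name introduced here for illustration, not part of the package):

from typing import Any, Optional

def merge_options(
    defaults: dict[str, Any],
    options: Optional[dict[str, Any]] = None,
    override: bool = False,
) -> dict[str, Any]:
    # override=True: the caller's options replace the defaults entirely.
    # Otherwise dict-union, where the caller's keys win on conflicts.
    return (options or {}) if override else (defaults | (options or {}))

csv_defaults = {"has_header": True, "separator": ",", "skip_rows": 0, "encoding": "utf-8"}
print(merge_options(csv_defaults, {"separator": ";"}))
# {'has_header': True, 'separator': ';', 'skip_rows': 0, 'encoding': 'utf-8'}
print(merge_options(csv_defaults, {"separator": ";"}, override=True))
# {'separator': ';'}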
ddeutil/workflow/vendors/sftp.py
DELETED
@@ -1,209 +0,0 @@
-import contextlib
-from collections import deque
-from collections.abc import Generator, Iterator
-from ftplib import FTP
-from stat import S_ISDIR, S_ISREG
-from typing import Optional
-
-try:
-    import paramiko
-    from paramiko import SFTPAttributes, SFTPClient
-    from sshtunnel import BaseSSHTunnelForwarderError, SSHTunnelForwarder
-except ImportError:
-    raise ImportError(
-        "Please install paramiko and sshtunnel packages before using,\n\t\t"
-        "$ pip install paramiko sshtunnel"
-    ) from None
-
-
-class WrapFTP:
-    """Wrapped FTP Client.
-
-    FTP (File Transfer Protocol) is the standard for transferring files
-    between a client and a server, and is one of the most popular protocols.
-
-    But it has a disadvantage: the data sent and received is not encrypted,
-    so a third party has an opportunity to read the information during the
-    transfer.
-    """
-
-    def __init__(
-        self,
-        host: str,
-        user: str,
-        pwd: str,
-        port: int = 21,
-    ):
-        self.host: str = host
-        self.port: int = port
-        self.user: str = user
-        self.pwd: str = pwd
-
-    def fpt_connect(self):
-        return FTP(
-            host=self.host,
-            user=self.user,
-            passwd=self.pwd,
-        )
-
-
-class WrapSFTP:
-    """Wrapped SFTP Client.
-
-    SFTP (Secure File Transfer Protocol) is a standard that helps increase
-    security in file transfers. By encrypting data and commands before
-    transferring files between the client and server with SSH (Secure Shell),
-    we can be confident that uploads and downloads are done safely.
-
-    The data cannot be accessed by third parties, and even if it is
-    obtained, it is encrypted and cannot be read.
-
-    See-Also:
-
-        This object will wrap the [Paramiko](https://www.paramiko.org/) package
-        with my connection interface.
-    """
-
-    def __init__(
-        self,
-        host: str,
-        user: Optional[str] = None,
-        port: Optional[int] = None,
-        *,
-        pwd: Optional[str] = None,
-        private_key: Optional[str] = None,
-        private_key_password: Optional[str] = None,
-    ) -> None:
-        self.host: str = host
-        self.user: str = user or ""
-        self.port: int = port or 22
-        self.pwd: Optional[str] = pwd
-
-        # Private key path like, ``/home/user/.ssh/id_rsa``.
-        self.private_key = private_key
-
-        # If this private key has a password, the private_key passphrase.
-        self.private_key_pwd = private_key_password
-
-    def get(self, remote_path, local_path):
-        with self.transport_client() as sftp:
-            sftp.get(remote_path, local_path)
-
-    def put(self, remote_path, local_path):
-        with self.transport_client() as sftp:
-            sftp.put(remote_path, local_path)
-
-    def rm(self, remote_path: str):
-        with self.transport_client() as sftp:
-            sftp.remove(remote_path)
-
-    def mkdir(self, remote_path: str):
-        with self.transport_client() as sftp:
-            sftp.mkdir(remote_path)
-
-    @contextlib.contextmanager
-    def ssh_tunnel(self) -> Iterator:
-        try:
-            with SSHTunnelForwarder(
-                (self.host, self.port),
-                ssh_username=self.user,
-                ssh_password=self.pwd,
-                ssh_pkey=self.private_key,
-                ssh_private_key_password=self.private_key_pwd,
-                local_bind_address=("0.0.0.0", 22),
-                # Use a suitable remote_bind_address that is able to be the DB
-                # host on that SSH Server.
-                remote_bind_address=("127.0.0.1", self.port),
-            ) as tunnel:
-                tunnel.check_tunnels()
-                client = paramiko.SSHClient()
-                if self.private_key:
-                    client.load_system_host_keys()
-                # NOTE: Add SSH key to known_hosts file.
-                client.set_missing_host_key_policy(
-                    paramiko.MissingHostKeyPolicy()
-                )
-
-                # NOTE: Start connect to SSH Server
-                client.connect(
-                    "127.0.0.1",
-                    port=tunnel.local_bind_port,
-                    **(
-                        {
-                            "username": self.user,
-                            "password": self.pwd,
-                            "allow_agent": False,
-                            "look_for_keys": False,
-                            "banner_timeout": 20,
-                        }
-                        if self.pwd
-                        else {}
-                    ),
-                )
-                with client.open_sftp() as sftp:
-                    yield sftp
-                client.close()
-        except BaseSSHTunnelForwarderError as err:
-            raise ValueError(
-                "This config data does not connect to the Server"
-            ) from err
-
-    @contextlib.contextmanager
-    def transport_client(self) -> Generator[SFTPClient, None, None]:
-        with paramiko.Transport(sock=(self.host, self.port)) as transport:
-            transport.connect(
-                hostkey=None,
-                username=self.user,
-                password=self.pwd,
-            )
-            with paramiko.SFTPClient.from_transport(transport) as sftp:
-                yield sftp
-
-    @contextlib.contextmanager
-    def simple_client(self) -> Generator[SFTPClient, None, None]:
-        client = paramiko.SSHClient()
-        client.set_missing_host_key_policy(paramiko.MissingHostKeyPolicy())
-        client.connect(
-            hostname=self.host,
-            port=self.port,
-            username=self.user,
-            password=self.pwd,
-        )
-        with client.open_sftp() as sftp:
-            yield sftp
-        client.close()
-
-    def glob(self, pattern: str) -> Iterator[str]:
-        with self.transport_client() as sftp:
-            try:
-                # NOTE: List files matching the pattern on the SFTP server
-                f: SFTPAttributes
-                for f in sftp.listdir_attr(pattern):
-                    yield pattern + f.filename
-            except FileNotFoundError:
-                raise FileNotFoundError(
-                    f"Pattern {pattern!r} was not found on the SFTP server"
-                ) from None
-
-    def walk(self, pattern: str) -> Iterator[str]:
-        dirs: deque = deque([pattern])
-        with self.transport_client() as sftp:
-            while len(dirs) > 0:
-                d: str = dirs.popleft()
-                f: SFTPAttributes
-                for f in sftp.listdir_attr(d):
-                    rs: str = (
-                        (d + f.filename) if d == "/" else (d + "/" + f.filename)
-                    )
-                    if S_ISDIR(f.st_mode):
-                        dirs.append(rs)
-                    elif S_ISREG(f.st_mode):
-                        yield rs
-
-    @staticmethod
-    def isdir(path: SFTPAttributes):
-        try:
-            return S_ISDIR(path.st_mode)
-        except OSError:
-            # NOTE: Path does not exist, so by definition not a directory
-            return False
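The walk() method above is an iterative breadth-first traversal: a deque of pending directories, with S_ISDIR/S_ISREG deciding whether an entry is queued for later or yielded as a file. The same pattern against the local filesystem, for illustration only, with os.scandir standing in for the sftp.listdir_attr call:

# Local-filesystem analogue of the removed WrapSFTP.walk() (sketch only).
import os
from collections import deque
from collections.abc import Iterator
from stat import S_ISDIR, S_ISREG

def walk(root: str) -> Iterator[str]:
    dirs: deque[str] = deque([root])
    while dirs:
        d: str = dirs.popleft()
        for entry in os.scandir(d):
            rs: str = (d + entry.name) if d == "/" else (d + "/" + entry.name)
            mode = entry.stat(follow_symlinks=False).st_mode
            if S_ISDIR(mode):
                dirs.append(rs)   # directory: queue it for a later pass
            elif S_ISREG(mode):
                yield rs          # regular file: emit its full path

for path in walk("."):
    print(path)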
ddeutil_workflow-0.0.3.dist-info/RECORD
DELETED
@@ -1,29 +0,0 @@
-ddeutil/workflow/__about__.py,sha256=smA9c0CTLewINRoxj2VBHoiYDESoFGtXYFDvRT31dgs,27
-ddeutil/workflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ddeutil/workflow/__regex.py,sha256=bOngaQ0zJgy3vfNwF2MlI8XhLu_Ei1Vz8y50iLj8ao4,1061
-ddeutil/workflow/__types.py,sha256=AkpQq6QlrclpurCZZVY9RMxoyS9z2WGzhaz_ikeTaCU,453
-ddeutil/workflow/conn.py,sha256=POtNcyqFNGxZnkg5J_H1OIvQVnnqG-ajmBBzjoHl9sg,7238
-ddeutil/workflow/exceptions.py,sha256=XAq82VHSMLNb4UjGatp7hYfjxFtMiKFtBqJyAhwTl-s,434
-ddeutil/workflow/loader.py,sha256=TXS4k2dqNycBYSTYcJ80WIsPMKNZbHNeBbcufX6lrJc,5483
-ddeutil/workflow/pipeline.py,sha256=fG6ta-SNx4OWS6n8w7YpYDadfnbqayj8A1uY03TvLUA,16942
-ddeutil/workflow/schedule.py,sha256=RMbTC7L32D3fJ5gYxJDCn-vPr2RYEBMSD0G2kj1Qows,2712
-ddeutil/workflow/utils.py,sha256=z7evB9kOsgTr30uVuL994bmOMDNZB5xDY2KjO7gL1dc,5379
-ddeutil/workflow/tasks/__init__.py,sha256=TIcw9JinrdepWgyazSMLk_QflUFms99ILI4GvLHUGD0,338
-ddeutil/workflow/tasks/_pandas.py,sha256=rqz5_VMSqkEdirk7i3EElZoqnRYFyyK_Z8_Zt8FyeTg,1693
-ddeutil/workflow/tasks/_polars.py,sha256=SYEBx-0I9tbY046QGSMokVugK8Fqjhiw4dzpL6y6Hww,2917
-ddeutil/workflow/vendors/__dataset.py,sha256=n9EwTIoVlgCKyCQQgQrijeoQgauOoKtdKiWCzSIErns,4065
-ddeutil/workflow/vendors/__dict.py,sha256=ETwkeA0qzKNgedfeRgAz1qShNXTIXIS4DXzJB4lM4jo,9962
-ddeutil/workflow/vendors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ddeutil/workflow/vendors/__schedule.py,sha256=cUIwtTli9G0klrNFxAIbG3VTiv6umRuNAZiKA-kOtpE,20690
-ddeutil/workflow/vendors/aws.py,sha256=zjq_LCu3ffVBRrxS2vqss9X24yrtuAEt9ouy2_WvS0o,5980
-ddeutil/workflow/vendors/az.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ddeutil/workflow/vendors/minio.py,sha256=pScLy38Du9moOrGaSBSFsoQRhiQ686FQyloOeLA0OQk,261
-ddeutil/workflow/vendors/pd.py,sha256=J6Nkb4RqUnz3NMfo3cHX-Udw3HPjqjUimojS86rR4os,116
-ddeutil/workflow/vendors/pg.py,sha256=TGwkV6nsarGLbiRTT_wB4uAy3xCR89EPPCMWqlWhFe8,422
-ddeutil/workflow/vendors/pl.py,sha256=B-l9zcZ9vATAKVMLv5tjKiWo5Qt8ZIv_aQzuVFinKbY,5087
-ddeutil/workflow/vendors/sftp.py,sha256=lQn4mnHhgvE9g1pbpoQF7HvZOxab8Z2XaDtSIJvumGM,7090
-ddeutil_workflow-0.0.3.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.3.dist-info/METADATA,sha256=KcsTd-FjufMK-4fhiIq27yeQUuA7NeB8TCkbXADQ1Dc,7992
-ddeutil_workflow-0.0.3.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-ddeutil_workflow-0.0.3.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.3.dist-info/RECORD,,
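Each RECORD row has the form path,sha256=<digest>,size, where the digest is the unpadded URL-safe base64 encoding of the file's SHA-256, per the wheel spec. A minimal sketch for recomputing one entry (pass whatever extracted file you want to verify):

# Recompute a wheel RECORD hash entry for a file on disk.
import base64
import hashlib

def record_hash(path: str) -> str:
    # SHA-256 digest, encoded as unpadded URL-safe base64, as in RECORD rows.
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode()

# For the 0.0.3 loader.py this should reproduce the RECORD line above:
# record_hash("ddeutil/workflow/loader.py")
# -> "sha256=TXS4k2dqNycBYSTYcJ80WIsPMKNZbHNeBbcufX6lrJc"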
{ddeutil_workflow-0.0.3.dist-info → ddeutil_workflow-0.0.5.dist-info}/LICENSE
File without changes

{ddeutil_workflow-0.0.3.dist-info → ddeutil_workflow-0.0.5.dist-info}/top_level.txt
File without changes