ddeutil-workflow 0.0.2__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/schedule.py CHANGED
@@ -18,8 +18,8 @@ from .__types import DictData
 from .loader import Loader


-class BaseScdl(BaseModel):
-    """Base Scdl (Schedule) Model"""
+class BaseSchedule(BaseModel):
+    """Base Schedule Model"""

     model_config = ConfigDict(arbitrary_types_allowed=True)

@@ -61,16 +61,16 @@ class BaseScdl(BaseModel):
         return self.cronjob.schedule(date=(start.astimezone(ZoneInfo(self.tz))))


-class Scdl(BaseScdl):
-    """Scdl (Schedule) Model.
+class Schedule(BaseSchedule):
+    """Schedule Model.

     See Also:
         * ``generate()`` is the main usecase of this schedule object.
     """


-class ScdlBkk(Scdl):
-    """Asia Bangkok Scdl (Schedule) timezone Model.
+class ScheduleBkk(Schedule):
+    """Asia Bangkok Schedule timezone Model.

     This model use for change timezone from utc to Asia/Bangkok
     """
@@ -78,5 +78,5 @@ class ScdlBkk(Scdl):
     tz: Annotated[str, Field(description="Timezone")] = "Asia/Bangkok"


-class AwsScdl(BaseScdl):
+class AwsSchedule(BaseSchedule):
     """Implement Schedule for AWS Service."""
ddeutil/workflow/tasks/_pandas.py CHANGED
@@ -4,7 +4,7 @@ import math
 try:
     import pandas as pd

-    logging.debug(f"Polars version: {pd.__version__}")
+    logging.debug(f"Pandas version: {pd.__version__}")
 except ImportError as err:
     raise ImportError(
         "``split_iterable`` function want to use pandas package that does"
ddeutil/workflow/tasks/_polars.py CHANGED
@@ -5,13 +5,21 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations

+import logging
 from typing import Any
 from uuid import uuid4

-import polars as pl
+try:
+    import polars as pl
+
+    logging.debug(f"Polars version: {pl.__version__}")
+except ImportError:
+    raise ImportError(
+        "Please install polars if you want to use any relate task"
+    ) from None
 import pyarrow.parquet as pq
-from ddeutil.workflow.dataset import PolarsCsv, PolarsParq
 from ddeutil.workflow.utils import tag
+from ddeutil.workflow.vendors.pl import PolarsCsv, PolarsParq


 def polars_dtype():
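Both task modules now guard their optional dependency behind `try`/`except ImportError`, so a missing package fails fast at import time with an actionable message instead of surfacing later as a `NameError`. A small sketch of what a caller sees in an environment where polars is not installed:

```python
import importlib

try:
    # The import fails immediately at the guard, before any task runs.
    importlib.import_module("ddeutil.workflow.tasks._polars")
except ImportError as err:
    print(err)  # "Please install polars if you want to use any relate task"
```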
ddeutil/workflow/utils.py CHANGED
@@ -6,11 +6,17 @@
 from __future__ import annotations

 import inspect
+from abc import ABC, abstractmethod
+from datetime import date, datetime
 from functools import wraps
 from importlib import import_module
-from typing import Callable, Protocol
+from typing import Any, Callable, Literal, Optional, Protocol, Union

 from ddeutil.core import lazy
+from ddeutil.io.models.lineage import dt_now
+from pydantic import BaseModel, Field
+from pydantic.functional_validators import model_validator
+from typing_extensions import Self


 class TagFunc(Protocol):
@@ -63,3 +69,112 @@ def make_registry(module: str) -> dict[str, dict[str, Callable[[], TagFunc]]]:
         # NOTE: Create new register name if it not exists
         rs[func.name] = {func.tag: lazy(f"{module}.{fstr}")}
     return rs
+
+
+class BaseParams(BaseModel, ABC):
+    """Base Parameter that use to make Params Model."""
+
+    desc: Optional[str] = None
+    required: bool = True
+    type: str
+
+    @abstractmethod
+    def receive(self, value: Optional[Any] = None) -> Any:
+        raise ValueError(
+            "Receive value and validate typing before return valid value."
+        )
+
+
+class DefaultParams(BaseParams):
+    """Default Parameter that will check default if it required"""
+
+    default: Optional[str] = None
+
+    @abstractmethod
+    def receive(self, value: Optional[Any] = None) -> Any:
+        raise ValueError(
+            "Receive value and validate typing before return valid value."
+        )
+
+    @model_validator(mode="after")
+    def check_default(self) -> Self:
+        if not self.required and self.default is None:
+            raise ValueError(
+                "Default should set when this parameter does not required."
+            )
+        return self
+
+
+class DatetimeParams(DefaultParams):
+    """Datetime parameter."""
+
+    type: Literal["datetime"] = "datetime"
+    required: bool = False
+    default: datetime = Field(default_factory=dt_now)
+
+    def receive(self, value: str | datetime | date | None = None) -> datetime:
+        if value is None:
+            return self.default
+
+        if isinstance(value, datetime):
+            return value
+        elif isinstance(value, date):
+            return datetime(value.year, value.month, value.day)
+        elif not isinstance(value, str):
+            raise ValueError(
+                f"Value that want to convert to datetime does not support for "
+                f"type: {type(value)}"
+            )
+        return datetime.fromisoformat(value)
+
+
+class StrParams(DefaultParams):
+    """String parameter."""
+
+    type: Literal["str"] = "str"
+
+    def receive(self, value: Optional[str] = None) -> str | None:
+        if value is None:
+            return self.default
+        return str(value)
+
+
+class IntParams(DefaultParams):
+    """Integer parameter."""
+
+    type: Literal["int"] = "int"
+
+    def receive(self, value: Optional[int] = None) -> int | None:
+        if value is None:
+            return self.default
+        if not isinstance(value, int):
+            try:
+                return int(str(value))
+            except ValueError as err:
+                raise ValueError(
+                    f"Value that want to convert to integer does not support "
+                    f"for type: {type(value)}"
+                ) from err
+        return value
+
+
+class ChoiceParams(BaseParams):
+    type: Literal["choice"] = "choice"
+    options: list[str]
+
+    def receive(self, value: Optional[str] = None) -> str:
+        """Receive value that match with options."""
+        # NOTE:
+        #   Return the first value in options if does not pass any input value
+        if value is None:
+            return self.options[0]
+        if value not in self.options:
+            raise ValueError(f"{value} does not match any value in options")
+        return value
+
+
+Params = Union[
+    ChoiceParams,
+    DatetimeParams,
+    StrParams,
+]
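A short usage sketch of the new parameter models (assuming `dt_now` from `ddeutil.io` returns the current datetime, as its name suggests):

```python
from datetime import datetime

from ddeutil.workflow.utils import (
    ChoiceParams,
    DatetimeParams,
    IntParams,
    StrParams,
)

# Datetime params are not required and fall back to a dt_now() default.
p = DatetimeParams()
assert p.receive("2024-01-01") == datetime(2024, 1, 1)
assert isinstance(p.receive(), datetime)

# Str and int params coerce simple scalar inputs.
assert StrParams(required=False, default="x").receive() == "x"
assert IntParams().receive("42") == 42

# Choice params validate against the configured options.
choice = ChoiceParams(options=["daily", "hourly"])
assert choice.receive() == "daily"  # first option when no value is passed
assert choice.receive("hourly") == "hourly"
```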
ddeutil/workflow/vendors/__dataset.py ADDED
@@ -0,0 +1,127 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Annotated, Any, Optional
+
+from fmtutil import Datetime, FormatterGroupType, make_group
+from fmtutil.utils import escape_fmt_group
+from pydantic import BaseModel, Field
+from typing_extensions import Self
+
+from ..__types import DictData, TupleStr
+from ..conn import SubclassConn
+from ..loader import Loader
+
+EXCLUDED_EXTRAS: TupleStr = ("type",)
+OBJ_FMTS: FormatterGroupType = make_group({"datetime": Datetime})
+
+
+class BaseDataset(BaseModel):
+    """Base Dataset Model. This model implement only loading construction."""
+
+    conn: Annotated[SubclassConn, Field(description="Connection Model")]
+    endpoint: Annotated[
+        Optional[str],
+        Field(description="Endpoint of connection"),
+    ] = None
+    object: str = Field(description="Dataset object that want to contract")
+    features: list = Field(default_factory=list)
+    extras: dict[str, Any] = Field(default_factory=dict)
+
+    @classmethod
+    def from_loader(
+        cls,
+        name: str,
+        externals: DictData,
+    ) -> Self:
+        """Construct Connection with Loader object with specific config name.
+
+        :param name: A name of dataset that want to load from config file.
+        :param externals: An external parameters.
+        """
+        loader: Loader = Loader(name, externals=externals)
+
+        # NOTE: Validate the config type match with current dataset model
+        if loader.type != cls:
+            raise ValueError(f"Type {loader.type} does not match with {cls}")
+
+        filter_data: DictData = {
+            k: loader.data.pop(k)
+            for k in loader.data.copy()
+            if k not in cls.model_fields and k not in EXCLUDED_EXTRAS
+        }
+
+        if "conn" not in loader.data:
+            raise ValueError("Dataset config does not set ``conn`` value")
+
+        # NOTE: Start loading connection config
+        conn_name: str = loader.data.pop("conn")
+        conn_loader: Loader = Loader(conn_name, externals=externals)
+        conn_model: SubclassConn = conn_loader.type.from_loader(
+            name=conn_name, externals=externals
+        )
+
+        # NOTE: Override ``endpoint`` value to getter connection data.
+        if "endpoint" in loader.data:
+            # NOTE: Update endpoint path without Pydantic validator.
+            conn_model.__dict__["endpoint"] = loader.data["endpoint"]
+        else:
+            loader.data.update({"endpoint": conn_model.endpoint})
+        return cls.model_validate(
+            obj={
+                "extras": (
+                    loader.data.pop("extras", {}) | filter_data | externals
+                ),
+                "conn": conn_model,
+                **loader.data,
+            }
+        )
+
+
+class Dataset(BaseDataset):
+
+    def exists(self) -> bool:
+        raise NotImplementedError("Object exists does not implement")
+
+    def format_object(
+        self,
+        _object: str | None = None,
+        dt: str | datetime | None = None,
+    ) -> str:
+        """Format the object value that implement datetime"""
+        if dt is None:
+            dt = datetime.now()
+        dt: datetime = (
+            dt if isinstance(dt, datetime) else datetime.fromisoformat(dt)
+        )
+        return (
+            OBJ_FMTS({"datetime": dt})
+            .format(escape_fmt_group(_object or self.object))
+            .replace("\\", "")
+        )
+
+
+class FlDataset(Dataset):
+
+    def exists(self) -> bool:
+        return self.conn.find_object(self.object)
+
+
+class TblDataset(Dataset):
+
+    def exists(self) -> bool:
+        return self.conn.find_object(self.object)
+
+
+class FlDataFrame(Dataset):
+
+    def exists(self) -> bool:
+        return self.conn.find_object(self.object)
+
+
+class TblDataFrame(Dataset): ...
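Since `format_object` only reads `self.object`, its behavior can be sketched without a full connection config by using Pydantic's `model_construct`, which skips validation. The `"{datetime:%Y%m%d}"` template assumes fmtutil's group-format syntax with strftime-style codes; the exact template grammar is fmtutil's and is not verified here:

```python
from datetime import datetime

from ddeutil.workflow.vendors.__dataset import Dataset

# model_construct skips validation, so no real ``conn`` model is needed
# for this illustration.
ds = Dataset.model_construct(object="sales_{datetime:%Y%m%d}.csv")
print(ds.format_object(dt=datetime(2024, 1, 5)))  # e.g. sales_20240105.csv
```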
ddeutil/workflow/vendors/pd.py ADDED
@@ -0,0 +1,13 @@
+class PandasCSV: ...
+
+
+class PandasJson: ...
+
+
+class PandasParq: ...
+
+
+class PandasDb: ...
+
+
+class PandasExcel: ...
ddeutil/workflow/vendors/pg.py ADDED
@@ -0,0 +1,11 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+from .__dataset import TblDataset
+
+
+class PostgresTbl(TblDataset): ...
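`PostgresTbl` moves from `dataset.py` (removed below) into the vendors package, so downstream imports change path only:

```python
# 0.0.2
from ddeutil.workflow.dataset import PostgresTbl

# 0.0.3 -- same class, new location
from ddeutil.workflow.vendors.pg import PostgresTbl
```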
ddeutil/workflow/dataset.py → ddeutil/workflow/vendors/pl.py RENAMED
@@ -6,12 +6,10 @@
 from __future__ import annotations

 from datetime import datetime
-from typing import Annotated, Any, Optional
+from typing import Any, Optional

 from fmtutil import Datetime, FormatterGroupType, make_group
-from fmtutil.utils import escape_fmt_group
 from pydantic import BaseModel, Field
-from typing_extensions import Self

 try:
     import polars as pl
@@ -20,135 +18,13 @@ except ImportError:
         "Please install polars package\n\t\t$ pip install polars"
     ) from None

-from .__types import DictData, TupleStr
-from .conn import SubclassConn
-from .loader import Loader
+from ..__types import TupleStr
+from .__dataset import FlDataFrame, TblDataFrame

 EXCLUDED_EXTRAS: TupleStr = ("type",)
 OBJ_FMTS: FormatterGroupType = make_group({"datetime": Datetime})


-class BaseDataset(BaseModel):
-    """Base Dataset Model. This model implement only loading constructor."""
-
-    conn: Annotated[SubclassConn, Field(description="Connection Model")]
-    endpoint: Annotated[
-        Optional[str],
-        Field(description="Endpoint of connection"),
-    ] = None
-    object: str
-    features: list = Field(default_factory=list)
-    extras: dict[str, Any] = Field(default_factory=dict)
-
-    @classmethod
-    def from_loader(
-        cls,
-        name: str,
-        externals: DictData,
-    ) -> Self:
-        """Construct Connection with Loader object with specific config name.
-
-        :param name: A name of dataset that want to load from config file.
-        :param externals: An external parameters.
-        """
-        loader: Loader = Loader(name, externals=externals)
-
-        # NOTE: Validate the config type match with current dataset model
-        if loader.type != cls:
-            raise ValueError(f"Type {loader.type} does not match with {cls}")
-
-        filter_data: DictData = {
-            k: loader.data.pop(k)
-            for k in loader.data.copy()
-            if k not in cls.model_fields and k not in EXCLUDED_EXTRAS
-        }
-
-        if "conn" not in loader.data:
-            raise ValueError("Dataset config does not set ``conn`` value")
-
-        # NOTE: Start loading connection config
-        conn_name: str = loader.data.pop("conn")
-        conn_loader: Loader = Loader(conn_name, externals=externals)
-        conn_model: SubclassConn = conn_loader.type.from_loader(
-            name=conn_name, externals=externals
-        )
-
-        # NOTE: Override ``endpoint`` value to getter connection data.
-        if "endpoint" in loader.data:
-            # NOTE: Update endpoint path without Pydantic validator.
-            conn_model.__dict__["endpoint"] = loader.data["endpoint"]
-        else:
-            loader.data.update({"endpoint": conn_model.endpoint})
-        return cls.model_validate(
-            obj={
-                "extras": (
-                    loader.data.pop("extras", {}) | filter_data | externals
-                ),
-                "conn": conn_model,
-                **loader.data,
-            }
-        )
-
-
-class Dataset(BaseDataset):
-
-    def exists(self) -> bool:
-        raise NotImplementedError("Object exists does not implement")
-
-    def format_object(
-        self,
-        _object: str | None = None,
-        dt: str | datetime | None = None,
-    ) -> str:
-        """Format the object value that implement datetime"""
-        if dt is None:
-            dt = datetime.now()
-        dt: datetime = (
-            dt if isinstance(dt, datetime) else datetime.fromisoformat(dt)
-        )
-        return (
-            OBJ_FMTS({"datetime": dt})
-            .format(escape_fmt_group(_object or self.object))
-            .replace("\\", "")
-        )
-
-
-class FlDataset(Dataset):
-
-    def exists(self) -> bool:
-        return self.conn.find_object(self.object)
-
-
-class TblDataset(Dataset):
-
-    def exists(self) -> bool:
-        return self.conn.find_object(self.object)
-
-
-class FlDataFrame(Dataset):
-
-    def exists(self) -> bool:
-        return self.conn.find_object(self.object)
-
-
-class TblDataFrame(Dataset): ...
-
-
-class PandasCSV: ...
-
-
-class PandasJson: ...
-
-
-class PandasParq: ...
-
-
-class PandasDb: ...
-
-
-class PandasExcel: ...
-
-
 class PolarsCsvArgs(BaseModel):
     """CSV file should use format rfc4180 as CSV standard format.

@@ -293,10 +169,4 @@ class PolarsParq(FlDataFrame):
     )


-class PostgresTbl(TblDataset): ...
-
-
-class SqliteTbl(TblDataset): ...
-
-
 class PolarsPostgres(TblDataFrame): ...
ddeutil_workflow-0.0.2.dist-info/METADATA → ddeutil_workflow-0.0.3.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.2
+Version: 0.0.3
 Summary: Data Developer & Engineer Workflow Utility Objects
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -83,7 +83,13 @@ Out of you data that want to use in pipeline of workflow. Some of this component
 is similar component of the **Airflow** because I like it concepts.

 The main feature of this project is the `Pipeline` object that can call any
-registried function.
+registered function. The pipeline can handle everything that you want to do; it
+will pass parameters along and catch the output for reuse in the next step.
+
+> [!IMPORTANT]
+> In the future of this project, I will move the connection and dataset out to
+> dynamic registries instead of main features, because they carry a lot of
+> vendor code and dependencies to maintain. (I do not have time for these features.)

 ### Connection

@@ -104,7 +110,9 @@ assert conn.ping()

 ### Dataset

-The dataset is define any objects on the connection.
+A dataset defines objects on a connection. This feature was implemented under
+`/vendors` because it carries a lot of tools that interact with the many data
+systems in a data tool stack.

 ```yaml
 ds_postgres_customer_tbl:
@@ -116,7 +124,7 @@ ds_postgres_customer_tbl:
 ```

 ```python
-from ddeutil.workflow.dataset import PostgresTbl
+from ddeutil.workflow.vendors.pg import PostgresTbl

 dataset = PostgresTbl.from_loader(name='ds_postgres_customer_tbl', externals={})
 assert dataset.exists()
@@ -126,14 +134,14 @@ assert dataset.exists()

 ```yaml
 schd_for_node:
-  type: schedule.Scdl
+  type: schedule.Schedule
   cron: "*/5 * * * *"
 ```

 ```python
-from ddeutil.workflow.schedule import Scdl
+from ddeutil.workflow.schedule import Schedule

-scdl = Scdl.from_loader(name='schd_for_node', externals={})
+scdl = Schedule.from_loader(name='schd_for_node', externals={})
 assert '*/5 * * * *' == str(scdl.cronjob)

 cron_iterate = scdl.generate('2022-01-01 00:00:00')
@@ -234,14 +242,10 @@ pipe_el_pg_to_lake:
 pipe_hook_mssql_proc:
   type: ddeutil.workflow.pipe.Pipeline
   params:
-    run_date:
-      type: datetime
-    sp_name:
-      type: str
-    source_name:
-      type: str
-    target_name:
-      type: str
+    run_date: datetime
+    sp_name: str
+    source_name: str
+    target_name: str
   jobs:
     transform:
       stages:
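The 0.0.3 params block accepts a plain string as shorthand for the parameter type. A hedged sketch of how such strings could resolve to the new params models in `ddeutil.workflow.utils` (this mapping table is an illustration, not the package's actual resolver):

```python
from ddeutil.workflow.utils import DatetimeParams, IntParams, StrParams

# Hypothetical resolver; the real loader logic may differ.
PARAM_TYPES = {
    "datetime": DatetimeParams,
    "str": StrParams,
    "int": IntParams,
}

def make_param(type_name: str):
    """Build a params model from the YAML shorthand string."""
    return PARAM_TYPES[type_name]()

run_date = make_param("datetime")
print(run_date.receive("2024-01-01"))  # 2024-01-01 00:00:00
```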
ddeutil_workflow-0.0.3.dist-info/RECORD ADDED
@@ -0,0 +1,29 @@
+ddeutil/workflow/__about__.py,sha256=smA9c0CTLewINRoxj2VBHoiYDESoFGtXYFDvRT31dgs,27
+ddeutil/workflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddeutil/workflow/__regex.py,sha256=bOngaQ0zJgy3vfNwF2MlI8XhLu_Ei1Vz8y50iLj8ao4,1061
+ddeutil/workflow/__types.py,sha256=AkpQq6QlrclpurCZZVY9RMxoyS9z2WGzhaz_ikeTaCU,453
+ddeutil/workflow/conn.py,sha256=POtNcyqFNGxZnkg5J_H1OIvQVnnqG-ajmBBzjoHl9sg,7238
+ddeutil/workflow/exceptions.py,sha256=XAq82VHSMLNb4UjGatp7hYfjxFtMiKFtBqJyAhwTl-s,434
+ddeutil/workflow/loader.py,sha256=TXS4k2dqNycBYSTYcJ80WIsPMKNZbHNeBbcufX6lrJc,5483
+ddeutil/workflow/pipeline.py,sha256=fG6ta-SNx4OWS6n8w7YpYDadfnbqayj8A1uY03TvLUA,16942
+ddeutil/workflow/schedule.py,sha256=RMbTC7L32D3fJ5gYxJDCn-vPr2RYEBMSD0G2kj1Qows,2712
+ddeutil/workflow/utils.py,sha256=z7evB9kOsgTr30uVuL994bmOMDNZB5xDY2KjO7gL1dc,5379
+ddeutil/workflow/tasks/__init__.py,sha256=TIcw9JinrdepWgyazSMLk_QflUFms99ILI4GvLHUGD0,338
+ddeutil/workflow/tasks/_pandas.py,sha256=rqz5_VMSqkEdirk7i3EElZoqnRYFyyK_Z8_Zt8FyeTg,1693
+ddeutil/workflow/tasks/_polars.py,sha256=SYEBx-0I9tbY046QGSMokVugK8Fqjhiw4dzpL6y6Hww,2917
+ddeutil/workflow/vendors/__dataset.py,sha256=n9EwTIoVlgCKyCQQgQrijeoQgauOoKtdKiWCzSIErns,4065
+ddeutil/workflow/vendors/__dict.py,sha256=ETwkeA0qzKNgedfeRgAz1qShNXTIXIS4DXzJB4lM4jo,9962
+ddeutil/workflow/vendors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddeutil/workflow/vendors/__schedule.py,sha256=cUIwtTli9G0klrNFxAIbG3VTiv6umRuNAZiKA-kOtpE,20690
+ddeutil/workflow/vendors/aws.py,sha256=zjq_LCu3ffVBRrxS2vqss9X24yrtuAEt9ouy2_WvS0o,5980
+ddeutil/workflow/vendors/az.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddeutil/workflow/vendors/minio.py,sha256=pScLy38Du9moOrGaSBSFsoQRhiQ686FQyloOeLA0OQk,261
+ddeutil/workflow/vendors/pd.py,sha256=J6Nkb4RqUnz3NMfo3cHX-Udw3HPjqjUimojS86rR4os,116
+ddeutil/workflow/vendors/pg.py,sha256=TGwkV6nsarGLbiRTT_wB4uAy3xCR89EPPCMWqlWhFe8,422
+ddeutil/workflow/vendors/pl.py,sha256=B-l9zcZ9vATAKVMLv5tjKiWo5Qt8ZIv_aQzuVFinKbY,5087
+ddeutil/workflow/vendors/sftp.py,sha256=lQn4mnHhgvE9g1pbpoQF7HvZOxab8Z2XaDtSIJvumGM,7090
+ddeutil_workflow-0.0.3.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.3.dist-info/METADATA,sha256=KcsTd-FjufMK-4fhiIq27yeQUuA7NeB8TCkbXADQ1Dc,7992
+ddeutil_workflow-0.0.3.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ddeutil_workflow-0.0.3.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.3.dist-info/RECORD,,
ddeutil_workflow-0.0.2.dist-info/RECORD DELETED
@@ -1,25 +0,0 @@
-ddeutil/workflow/__about__.py,sha256=Ow9Rd-50zm5qXBM0iKjjMszFJAKgfR8LsocpgUKYVrI,27
-ddeutil/workflow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ddeutil/workflow/__regex.py,sha256=bOngaQ0zJgy3vfNwF2MlI8XhLu_Ei1Vz8y50iLj8ao4,1061
-ddeutil/workflow/__types.py,sha256=7t-i5_-eif9zUBwb7jnv5RbSjzOihyl3yFDqXzaeyxk,428
-ddeutil/workflow/conn.py,sha256=KpQywXVurVvUjOdDg8zrPp1mKyRwR6qqrEjkPBhi_yc,6838
-ddeutil/workflow/dataset.py,sha256=Qszg65YyMCy6Bojya_OSbXtUjCqS8sZnDJvJtu706YA,8578
-ddeutil/workflow/exceptions.py,sha256=LAa3QlnPqECGxq6PpXoXNZVca7v7l4szjTZ3zWXJoWM,824
-ddeutil/workflow/loader.py,sha256=iTE1Rhjl-bq3YHZCKYWY51iwbQie2N6-MsaduOF1qSg,4463
-ddeutil/workflow/pipeline.py,sha256=r3FjbiC51eDArkoOQmfoSE--bZgggxUvRUh99bj3A9Y,14529
-ddeutil/workflow/schedule.py,sha256=f3V6dM66fM2C67vt9ltFSyqmxCMx1yo_gJuUDR_RMIE,2672
-ddeutil/workflow/utils.py,sha256=j2CmadYq9nXTyGWYGp_L0LS-fOnkTyzTNpf359ne1dA,2011
-ddeutil/workflow/tasks/__init__.py,sha256=TIcw9JinrdepWgyazSMLk_QflUFms99ILI4GvLHUGD0,338
-ddeutil/workflow/tasks/_pandas.py,sha256=ob8Ozq9ASiky8SwxWcR8HUr_j-qQoJez6EEckHrfR3s,1693
-ddeutil/workflow/tasks/_polars.py,sha256=0EbASTqc-aNtNnLN1QIYONqb5gzJvoqqZ-V584UNQtI,2708
-ddeutil/workflow/vendors/__dict.py,sha256=ETwkeA0qzKNgedfeRgAz1qShNXTIXIS4DXzJB4lM4jo,9962
-ddeutil/workflow/vendors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ddeutil/workflow/vendors/__schedule.py,sha256=cUIwtTli9G0klrNFxAIbG3VTiv6umRuNAZiKA-kOtpE,20690
-ddeutil/workflow/vendors/aws_warpped.py,sha256=zjq_LCu3ffVBRrxS2vqss9X24yrtuAEt9ouy2_WvS0o,5980
-ddeutil/workflow/vendors/minio_warpped.py,sha256=pScLy38Du9moOrGaSBSFsoQRhiQ686FQyloOeLA0OQk,261
-ddeutil/workflow/vendors/sftp_wrapped.py,sha256=lQn4mnHhgvE9g1pbpoQF7HvZOxab8Z2XaDtSIJvumGM,7090
-ddeutil_workflow-0.0.2.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
-ddeutil_workflow-0.0.2.dist-info/METADATA,sha256=o1ECtqH1WSBpz0UNu9OdzFqmxrmpbZP2H-KqHF4l8i0,7511
-ddeutil_workflow-0.0.2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-ddeutil_workflow-0.0.2.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
-ddeutil_workflow-0.0.2.dist-info/RECORD,,