ddeutil-workflow 0.0.10-py3-none-any.whl → 0.0.12-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/stage.py CHANGED
@@ -5,7 +5,7 @@
  # ------------------------------------------------------------------------------
  """Stage Model that use for getting stage data template from Job Model.
  The stage that handle the minimize task that run in some thread (same thread at
- its job owner) that mean it is the lowest executor of a pipeline workflow that
+ its job owner) that mean it is the lowest executor of a workflow workflow that
  can tracking logs.

  The output of stage execution only return 0 status because I do not want to
@@ -551,12 +551,12 @@ class HookStage(BaseStage):


  class TriggerStage(BaseStage):
- """Trigger Pipeline execution stage that execute another pipeline object.
+ """Trigger Workflow execution stage that execute another workflow object.

  Data Validate:
  >>> stage = {
- ... "name": "Trigger pipeline stage execution",
- ... "trigger": 'pipeline-name-for-loader',
+ ... "name": "Trigger workflow stage execution",
+ ... "trigger": 'workflow-name-for-loader',
  ... "params": {
  ... "run-date": "2024-08-01",
  ... "source": "src",
@@ -564,31 +564,31 @@ class TriggerStage(BaseStage):
  ... }
  """

- trigger: str = Field(description="A trigger pipeline name.")
+ trigger: str = Field(description="A trigger workflow name.")
  params: DictData = Field(
  default_factory=dict,
- description="A parameter that want to pass to pipeline execution.",
+ description="A parameter that want to pass to workflow execution.",
  )

  @handler_result("Raise from TriggerStage")
  def execute(self, params: DictData) -> Result:
- """Trigger pipeline execution.
+ """Trigger workflow execution.

  :param params: A parameter data that want to use in this execution.
  :rtype: Result
  """
- from .pipeline import Pipeline
+ from . import Workflow

- # NOTE: Loading pipeline object from trigger name.
+ # NOTE: Loading workflow object from trigger name.
  _trigger: str = param2template(self.trigger, params=params)

- # NOTE: Set running pipeline ID from running stage ID to external
+ # NOTE: Set running workflow ID from running stage ID to external
  # params on Loader object.
- pipe: Pipeline = Pipeline.from_loader(
+ wf: Workflow = Workflow.from_loader(
  name=_trigger, externals={"run_id": self.run_id}
  )
  logger.info(f"({self.run_id}) [STAGE]: Trigger-Execute: {_trigger!r}")
- return pipe.execute(params=param2template(self.params, params))
+ return wf.execute(params=param2template(self.params, params))


  # NOTE: Order of parsing stage data
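For orientation, the renamed stage can be driven directly. The snippet below is a minimal sketch, not code from the package: it assumes `TriggerStage` is an ordinary Pydantic model (it subclasses `BaseStage` and uses `Field`, per the hunk above) and that a workflow config named `workflow-name-for-loader` exists under the configured conf path.

```python
# Hedged sketch of exercising the renamed TriggerStage; the field values
# mirror the "Data Validate" docstring above. `workflow-name-for-loader`
# is a placeholder config name, not a template shipped with the package.
from ddeutil.workflow.stage import TriggerStage
from ddeutil.workflow.utils import Result

stage = TriggerStage(
    name="Trigger workflow stage execution",
    trigger="workflow-name-for-loader",
    params={"run-date": "2024-08-01", "source": "src"},
)

# execute() templates `trigger`, loads the target via Workflow.from_loader,
# and returns the triggered workflow's Result (status 0 on success).
result: Result = stage.execute(params={})
```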
ddeutil/workflow/utils.py CHANGED
@@ -151,7 +151,7 @@ def config() -> ConfParams:


  class SimLoad:
  """Simple Load Object that will search config data by given some identity
- value like name of pipeline or on.
+ value like name of workflow or on.

  :param name: A name of config data that will read by Yaml Loader object.
  :param params: A Params model object.
@@ -517,7 +517,7 @@ Param = Union[


  class Result(BaseModel):
  """Result Pydantic Model for passing parameter and receiving output from
- the pipeline execution.
+ the workflow execution.
  """

  status: int = Field(default=2)
@@ -655,7 +655,7 @@ def get_args_const(

  if len(body) > 1:
  raise UtilException(
- "Post-filter function should be only one calling per pipe"
+ "Post-filter function should be only one calling per wf"
  )

  caller: Union[Name, Call]
@@ -771,7 +771,7 @@ def str2template(
  ``RE_CALLER`` regular expression.

  The getter value that map a template should have typing support align
- with the pipeline parameter types that is `str`, `int`, `datetime`, and
+ with the workflow parameter types that is `str`, `int`, `datetime`, and
  `list`.

  :param value: A string value that want to mapped with an params
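The `str2template` docstring above describes caller templates matched by `RE_CALLER`. As a rough illustration only (the real resolver lives in `ddeutil.workflow.utils` and supports post-filters and typed returns), here is what a stripped-down version of that substitution could look like, assuming a GitHub-Actions-style `${{ params.<name> }}` syntax, which the README cites as the package's inspiration:

```python
import re

# Hypothetical stand-in for RE_CALLER; the real pattern is richer and also
# captures post-filter segments (see get_args_const above).
CALLER = re.compile(r"\$\{\{\s*params\.(?P<name>[\w-]+)\s*\}\}")

def render(value: str, params: dict) -> str:
    """Replace each ${{ params.<name> }} with its value from `params`."""
    return CALLER.sub(lambda m: str(params[m.group("name")]), value)

print(render("run at ${{ params.run-date }}", {"run-date": "2024-08-01"}))
# run at 2024-08-01
```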
ddeutil_workflow-0.0.12.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ddeutil-workflow
- Version: 0.0.10
+ Version: 0.0.12
  Summary: Lightweight workflow orchestration with less dependencies
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -24,10 +24,9 @@ License-File: LICENSE
  Requires-Dist: ddeutil-io
  Requires-Dist: python-dotenv ==1.0.1
  Requires-Dist: typer <1.0.0,==0.12.5
+ Requires-Dist: schedule <2.0.0,==1.2.2
  Provides-Extra: api
- Requires-Dist: fastapi[standard] <1.0.0,==0.112.2 ; extra == 'api'
- Provides-Extra: schedule
- Requires-Dist: schedule <2.0.0,==1.2.2 ; extra == 'schedule'
+ Requires-Dist: fastapi <1.0.0,==0.112.2 ; extra == 'api'

  # Workflow

@@ -38,22 +37,22 @@ Requires-Dist: schedule <2.0.0,==1.2.2 ; extra == 'schedule'
  [![code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)

  The **Lightweight workflow orchestration** with less dependencies the was created
- for easy to make a simple metadata driven for data pipeline orchestration.
+ for easy to make a simple metadata driven for data workflow orchestration.
  It can to use for data operator by a `.yaml` template.

  > [!WARNING]
  > This package provide only orchestration workload. That mean you should not use
  > workflow stage to process any large data which use lot of compute usecase.

- In my opinion, I think it should not create duplicate pipeline codes if I can
- write with dynamic input parameters on the one template pipeline that just change
+ In my opinion, I think it should not create duplicate workflow codes if I can
+ write with dynamic input parameters on the one template workflow that just change
  the input parameters per use-case instead.
- This way I can handle a lot of logical pipelines in our orgs with only metadata
- configuration. It called **Metadata Driven Data Pipeline**.
+ This way I can handle a lot of logical workflows in our orgs with only metadata
+ configuration. It called **Metadata Driven Data Workflow**.

  Next, we should get some monitoring tools for manage logging that return from
- pipeline running. Because it not show us what is a use-case that running data
- pipeline.
+ workflow running. Because it not show us what is a use-case that running data
+ workflow.

  > [!NOTE]
  > _Disclaimer_: I inspire the dynamic statement from the GitHub Action `.yml` files
@@ -75,8 +74,7 @@ this package with application add-ons, you should add `app` in installation;
  | Usecase | Install Optional | Support |
  |-------------------|------------------------------------------|--------------------|
  | Python & CLI | `pip install ddeutil-workflow` | :heavy_check_mark: |
- | Scheduler Service | `pip install ddeutil-workflow[schedule]` | :x: |
- | FastAPI Server | `pip install ddeutil-workflow[api]` | :x: |
+ | FastAPI Server | `pip install ddeutil-workflow[api]` | :heavy_check_mark: |


  > I added this feature to the main milestone.
@@ -97,67 +95,65 @@ use-case.

  > [!IMPORTANT]
  > I recommend you to use the `hook` stage for all actions that you want to do
- > with pipeline activity that you want to orchestrate. Because it able to dynamic
+ > with workflow activity that you want to orchestrate. Because it able to dynamic
  > an input argument with the same hook function that make you use less time to
- > maintenance your data pipelines.
+ > maintenance your data workflows.

  ```yaml
  run_py_local:
-   type: pipeline.Pipeline
-   on:
-     - cronjob: '*/5 * * * *'
-       timezone: "Asia/Bangkok"
-   params:
-     author-run: str
-     run-date: datetime
-   jobs:
-     getting-api-data:
-       stages:
-         - name: "Retrieve API Data"
-           id: retrieve-api
-           uses: tasks/get-api-with-oauth-to-s3@requests
-           with:
-             url: https://open-data/
-             auth: ${API_ACCESS_REFRESH_TOKEN}
-             aws_s3_path: my-data/open-data/
-
-             # This Authentication code should implement with your custom hook function.
-             # The template allow you to use environment variable.
-             aws_access_client_id: ${AWS_ACCESS_CLIENT_ID}
-             aws_access_client_secret: ${AWS_ACCESS_CLIENT_SECRET}
+   type: Workflow
+   on:
+     # If workflow deploy to schedule, it will running every 5 minutes
+     # with Asia/Bangkok timezone.
+     - cronjob: '*/5 * * * *'
+       timezone: "Asia/Bangkok"
+   params:
+     # Incoming execution parameters will validate with this type. It allow
+     # to set default value or templating.
+     author-run: str
+     run-date: datetime
+   jobs:
+     getting-api-data:
+       stages:
+         - name: "Retrieve API Data"
+           id: retrieve-api
+           uses: tasks/get-api-with-oauth-to-s3@requests
+           with:
+             url: https://open-data/
+             auth: ${API_ACCESS_REFRESH_TOKEN}
+             aws_s3_path: my-data/open-data/
+
+             # This Authentication code should implement with your custom hook function.
+             # The template allow you to use environment variable.
+             aws_access_client_id: ${AWS_ACCESS_CLIENT_ID}
+             aws_access_client_secret: ${AWS_ACCESS_CLIENT_SECRET}
  ```

  ## Configuration

- | Environment | Component | Default | Description |
- |-------------------------------------|-----------|------------------------------|----------------------------------------------------------------------------|
- | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application |
- | `WORKFLOW_CORE_REGISTRY` | Core | ddeutil.workflow,tests.utils | List of importable string for the hook stage |
- | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template |
- | `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files |
- | `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object |
- | `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output |
- | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | true | A flag that all stage raise StageException from stage execution |
- | `WORKFLOW_CORE_MAX_PIPELINE_POKING` | Core | 4 | |
- | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in pipeline executor |
- | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode |
- | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination |
-
-
- **Application**:
-
- | Environment | Default | Description |
- |-------------------------------------|----------------------------------|-------------------------------------------------------------------------|
- | `WORKFLOW_APP_PROCESS_WORKER` | 2 | The maximum process worker number that run in scheduler app module |
- | `WORKFLOW_APP_SCHEDULE_PER_PROCESS` | 100 | A schedule per process that run parallel |
- | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format |
-
- **API server**:
-
- | Environment | Default | Description |
- |--------------------------------------|---------|-----------------------------------------------------------------------------------|
- | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | true | A flag that enable workflow route to manage execute manually and workflow logging |
- | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | true | A flag that enable run scheduler |
+ | Environment | Component | Default | Description |
+ |-------------------------------------|-----------|----------------------------------|----------------------------------------------------------------------------|
+ | `WORKFLOW_ROOT_PATH` | Core | . | The root path of the workflow application |
+ | `WORKFLOW_CORE_REGISTRY` | Core | src.ddeutil.workflow,tests.utils | List of importable string for the hook stage |
+ | `WORKFLOW_CORE_REGISTRY_FILTER` | Core | ddeutil.workflow.utils | List of importable string for the filter template |
+ | `WORKFLOW_CORE_PATH_CONF` | Core | conf | The config path that keep all template `.yaml` files |
+ | `WORKFLOW_CORE_TIMEZONE` | Core | Asia/Bangkok | A Timezone string value that will pass to `ZoneInfo` object |
+ | `WORKFLOW_CORE_STAGE_DEFAULT_ID` | Core | true | A flag that enable default stage ID that use for catch an execution output |
+ | `WORKFLOW_CORE_STAGE_RAISE_ERROR` | Core | true | A flag that all stage raise StageException from stage execution |
+ | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | |
+ | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor |
+ | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode |
+ | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination |
+ | `WORKFLOW_APP_PROCESS_WORKER` | Schedule | 2 | The maximum process worker number that run in scheduler app module |
+ | `WORKFLOW_APP_SCHEDULE_PER_PROCESS` | Schedule | 100 | A schedule per process that run parallel |
+ | `WORKFLOW_APP_STOP_BOUNDARY_DELTA` | Schedule | '{"minutes": 5, "seconds": 20}' | A time delta value that use to stop scheduler app in json string format |
+
+ **API Application**:
+
+ | Environment | Component | Default | Description |
+ |--------------------------------------|-----------|---------|-----------------------------------------------------------------------------------|
+ | `WORKFLOW_API_ENABLE_ROUTE_WORKFLOW` | API | true | A flag that enable workflow route to manage execute manually and workflow logging |
+ | `WORKFLOW_API_ENABLE_ROUTE_SCHEDULE` | API | true | A flag that enable run scheduler |
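Putting the template and the API together: the snippet below is a minimal sketch (not taken from the README) that loads the `run_py_local` example above by name and runs it once. It assumes the YAML file sits under `WORKFLOW_CORE_PATH_CONF`, and it reuses the `Workflow.from_loader` and `execute` calls that appear verbatim in the stage.py hunk earlier in this diff.

```python
from ddeutil.workflow import Workflow

# Load the `run_py_local` template shown above from the conf path; the name
# must match the top-level key in the YAML file.
workflow = Workflow.from_loader(name="run_py_local", externals={})

# Execute once with concrete values for the declared params; execute()
# returns the Result model defined in ddeutil.workflow.utils.
result = workflow.execute(
    params={"author-run": "local-user", "run-date": "2024-08-01"}
)
print(result.status)
```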

  ## Deployment

@@ -165,18 +161,18 @@ This package able to run as a application service for receive manual trigger
  from the master node via RestAPI or use to be Scheduler background service
  like crontab job but via Python API.

- ### Schedule Service
+ ### Schedule App

  ```shell
- (venv) $ python src.ddeutil.workflow.app
+ (venv) $ ddeutil-workflow schedule
  ```

  ### API Server

  ```shell
- (venv) $ uvicorn src.ddeutil.workflow.api:app --host 0.0.0.0 --port 80 --reload
+ (venv) $ uvicorn src.ddeutil.workflow.api:app --host 127.0.0.1 --port 80
  ```

  > [!NOTE]
  > If this package already deploy, it able to use
- > `uvicorn ddeutil.workflow.api:app --host 0.0.0.0 --port 80 --workers 4`
+ > `uvicorn ddeutil.workflow.api:app --host 127.0.0.1 --port 80 --workers 4`
ddeutil_workflow-0.0.12.dist-info/RECORD ADDED
@@ -0,0 +1,21 @@
+ ddeutil/workflow/__about__.py,sha256=dDoptHBL0iJdvMM__PxrYhbuaSInQh_epfEsH-vdqVw,28
+ ddeutil/workflow/__init__.py,sha256=aEQiEWwTPGhfwpzzdb99xXaHchi5ABWUHl2iLIyT18E,664
+ ddeutil/workflow/__types.py,sha256=SYMoxbENQX8uPsiCZkjtpHAqqHOh8rUrarAFicAJd0E,1773
+ ddeutil/workflow/api.py,sha256=xVP8eGu1nnR8HM0ULTwxs9TV9tsxCOjZ68cAffw2f3o,4802
+ ddeutil/workflow/cli.py,sha256=Ikcq526WeIl-737-v55T0PwAZ2pNiZFxlN0Y-DjhDbQ,3374
+ ddeutil/workflow/cron.py,sha256=uhp3E5pl_tX_H88bsDujcwdhZmOE53csyV-ouPpPdK8,25321
+ ddeutil/workflow/exceptions.py,sha256=Uf1-Tn8rAzj0aiVHSqo4fBqO80W0za7UFZgKv24E-tg,706
+ ddeutil/workflow/job.py,sha256=eESvmIbIyYtiKgVLfILtb0lTz-shMhy4Bi7kHrtZSk0,19663
+ ddeutil/workflow/log.py,sha256=bZyyqf3oNBB8oRf8RI0YvII7wHHoj4wC-nmW_pQjQ1c,6036
+ ddeutil/workflow/on.py,sha256=Sxwnu0vPbIrMR_WWvH3_rOvD0tbiJntcB5378WoV19M,7163
+ ddeutil/workflow/repeat.py,sha256=e3dekPTlMlxCCizfBYsZ8dD8Juy4rtfqDZJU3Iky2oA,5011
+ ddeutil/workflow/route.py,sha256=ABEk-WlVo9XGFc7zCPbckX33URCNH7woQFU1keX_8PQ,6970
+ ddeutil/workflow/scheduler.py,sha256=ISiVoKM0puh3XaXZ9NTi-J-vREGxdEa-lk1jR6a4OXk,41639
+ ddeutil/workflow/stage.py,sha256=nYsKKT5ZKelEsFnDScBwaitXOv_aUEFkubDPQVK5isM,20644
+ ddeutil/workflow/utils.py,sha256=TbqgPkDDYBpqCZ7HV2TU3AH1_Mv-zfrJdwVL-l2SPUo,28559
+ ddeutil_workflow-0.0.12.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ ddeutil_workflow-0.0.12.dist-info/METADATA,sha256=U95bFDPtWdPuA7KcIq8kDYNor6CBztiXjdjhrAPvMDY,9395
+ ddeutil_workflow-0.0.12.dist-info/WHEEL,sha256=UvcQYKBHoFqaQd6LKyqHw9fxEolWLQnlzP0h_LgJAfI,91
+ ddeutil_workflow-0.0.12.dist-info/entry_points.txt,sha256=0BVOgO3LdUdXVZ-CiHHDKxzEk2c8J30jEwHeKn2YCWI,62
+ ddeutil_workflow-0.0.12.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ ddeutil_workflow-0.0.12.dist-info/RECORD,,
ddeutil_workflow-0.0.12.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (73.0.1)
+ Generator: setuptools (74.0.0)
  Root-Is-Purelib: true
  Tag: py3-none-any