kubernetes-watch 0.1.5__tar.gz → 0.1.9__tar.gz

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/LICENSE +21 -21
  2. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/PKG-INFO +5 -3
  3. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/README.md +117 -117
  4. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/enums/kube.py +5 -5
  5. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/enums/logic.py +8 -8
  6. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/enums/providers.py +12 -12
  7. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/enums/workflow.py +17 -17
  8. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/models/common.py +16 -16
  9. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/models/workflow.py +60 -60
  10. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/clusters/kube.py +185 -185
  11. kubernetes_watch-0.1.9/kube_watch/modules/database/model.py +12 -0
  12. kubernetes_watch-0.1.9/kube_watch/modules/database/postgre.py +271 -0
  13. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/logic/actions.py +55 -55
  14. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/logic/checks.py +7 -7
  15. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/logic/load.py +23 -23
  16. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/logic/merge.py +31 -31
  17. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/logic/scheduler.py +74 -74
  18. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/mock/mock_generator.py +53 -53
  19. kubernetes_watch-0.1.9/kube_watch/modules/providers/__init__.py +0 -0
  20. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/providers/aws.py +210 -210
  21. kubernetes_watch-0.1.9/kube_watch/modules/providers/git.py +121 -0
  22. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/providers/github.py +126 -126
  23. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/providers/vault.py +188 -188
  24. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/standalone/metarecogen/ckan_to_gn.py +132 -132
  25. kubernetes_watch-0.1.9/kube_watch/watch/__init__.py +1 -0
  26. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/watch/helpers.py +170 -170
  27. kubernetes_watch-0.1.9/kube_watch/watch/workflow.py +232 -0
  28. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/pyproject.toml +28 -23
  29. kubernetes_watch-0.1.5/kube_watch/modules/providers/git.py +0 -33
  30. kubernetes_watch-0.1.5/kube_watch/watch/__init__.py +0 -1
  31. kubernetes_watch-0.1.5/kube_watch/watch/workflow.py +0 -100
  32. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/__init__.py +0 -0
  33. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/enums/__init__.py +0 -0
  34. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/models/__init__.py +0 -0
  35. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/__init__.py +0 -0
  36. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/clusters/__init__.py +0 -0
  37. {kubernetes_watch-0.1.5/kube_watch/modules/mock → kubernetes_watch-0.1.9/kube_watch/modules/database}/__init__.py +0 -0
  38. {kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/modules/logic/trasnform.py +0 -0
  39. {kubernetes_watch-0.1.5/kube_watch/modules/providers → kubernetes_watch-0.1.9/kube_watch/modules/mock}/__init__.py +0 -0
{kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/LICENSE
@@ -1,21 +1,21 @@
- MIT License
-
- Copyright (c) 2024 Benyamin Motevalli
-
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to deal
- in the Software without restriction, including without limitation the rights
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in all
- copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- SOFTWARE.
+ MIT License
+
+ Copyright (c) 2024 Benyamin Motevalli
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.

{kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/PKG-INFO
@@ -1,10 +1,10 @@
  Metadata-Version: 2.1
  Name: kubernetes-watch
- Version: 0.1.5
+ Version: 0.1.9
  Summary:
  Author: bmotevalli
  Author-email: b.motevalli@gmail.com
- Requires-Python: >=3.10,<4
+ Requires-Python: >=3.10,<3.14
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
@@ -15,7 +15,9 @@ Requires-Dist: boto3 (>=1.34.68,<2.0.0)
  Requires-Dist: humps (>=0.2.2,<0.3.0)
  Requires-Dist: hvac (>=2.1.0,<3.0.0)
  Requires-Dist: kubernetes (>=29.0.0,<30.0.0)
- Requires-Dist: prefect (>=2.18.0,<3.0.0)
+ Requires-Dist: prefect (>=3.4.17,<4.0.0)
+ Requires-Dist: psycopg2 (>=2.9.10,<3.0.0)
+ Requires-Dist: pydantic (>=2.11.7,<3.0.0)
  Requires-Dist: requests (>=2.32.3,<3.0.0)
  Description-Content-Type: text/markdown

{kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/README.md
@@ -1,117 +1,117 @@
- # kube_watch
-
- # To setup the project:
-
- - `poetry install`
- - `poetry shell`
-
- # To install package to your environment locally:
-
- python setup.py install
-
- # To publish
-
- `poetry config pypi-token.pypi your-api-token`
- `poetry build`
- `poetry publish`
-
-
- # Description
- The kube_watch library is build on top of <a href='https://docs.prefect.io/latest/'>Prefect</a>. The library is designed to define workflows in a declaritive and flexible fashion. Originally, workflows in Prefect are defined via decorators such as @flow and @task. In kube_watch, workflows can be defined in a declaritive form via yaml files. The library is mainly focused on running scheduled workflows in kubernetes environment. However, it can easily be extended to be used for any purpose requiring a workflow. The workflow manifest has the following generic structure:
-
- ```
- workflow:
-   name: Dummy Workflow
-   runner: concurrent
-   tasks:
-     - name: Task_A
-       module: <module_path>
-       task: <func_name>
-       inputsArgType: arg
-       inputs:
-         parameters:
-           - name: x1
-             value: y1
-           - name: x2
-             value: y2
-           - name: x3
-             type: env
-             value: Y3
-
-     - name: Task_B
-       module: <module_path>
-       task: <func_name>
-       inputsArgType: arg
-       inputs:
-         parameters:
-           - name: xx1
-             value: yy1
-           - name: xx2
-             value: yy2
-       dependency:
-         - taskName: Task_A
-           inputParamName: xx3
-
-     - name: Task_C
-       module: <module_path>
-       task: <func_name>
-       inputsArgType: arg
-       conditional:
-         tasks: ["Task_B"]
- ```
-
-
- **runner**: concurrent | sequential: if concurrent selected, tasks will be run concurrently.
-
- **module**: all modules are located in 'modules' directory in kube_watch. This is where you can extend the library and add new tasks / modules. Below modules, there are submodules such as providers, clusters, and logic. Within each of this submodules, specific modules are defined. For example: providers.aws contains a series of tasks related to AWS. In this case, <module_path> = providers.aws. To add new tasks, add a new module with a similar pattern and refer the path in your task block.
-
- **task**: task is simply the name function that you put in the <module_path>. i.e. as you define a function in a module, you can simply start to use it in your manifests.
-
- **inputArgType**: arg | dict | list: if the task functions accept known-fixed number of parameters, then use arg.
-
- **dependency**: this block defines dependency of a child task to its parent. If **inputParamName** is defined, then OUTPUT of the parent task is passed to the child with an argument name defined by inputParamName.
-
- **IMPORTATN NOTE**: A strict assumption is that task functions return a single output. If there are cases with multiple output, wrap them into a dictionary and unwrap them in the child task.
-
- **conditional**: These are blocks where you can define when a task runs depending on the outcome of its parent. The parent task should return True or False.
-
-
- Parameters have also a type entry: env | static. static is default value. If type is defined as env, the parameter value is loaded from Environment Variables. In this case, value should be the name of the corresponding env var.
-
- In above examples:
-
- ```
- def Task_A(x1, x2, x3):
-     # do something
-     return output_A
-
- def Task_B(xx1, xx2, xx3):
-     # do something else
-     return output_B
-
- def Task_C():
-     # do another thing
-     return output_C
- ```
-
-
-
- # Batch workflows
- kube_watch also enables to run workflows in batch. A separate manifest with following form is required:
-
- batchFlows:
-   runner: sequential
-   items:
-     - path: path_to_flow_A.yaml
-     - path: path_to_flow_B.yaml
-     - path: path_to_flow_C.yaml
-
-
- # cron_app
- The cron_app folder contains an example use case of kube_watch library. The cron_app can be used to deploy a CronJob in a kubernetes environment. The app assumes the manifests are located in a separate repository. It will clone the repo and read the manifests and runs the workflows.
-
- # Connect to a server
- ## Start Server
- `prefect server start`
- ## To Connect
- To connect to a server, simply set the following environment variable: `PREFECT_API_URL`
+ # kube_watch
+
+ # To setup the project:
+
+ - `poetry install`
+ - `poetry shell`
+
+ # To install package to your environment locally:
+
+ python setup.py install
+
+ # To publish
+
+ `poetry config pypi-token.pypi your-api-token`
+ `poetry build`
+ `poetry publish`
+
+
+ # Description
+ The kube_watch library is build on top of <a href='https://docs.prefect.io/latest/'>Prefect</a>. The library is designed to define workflows in a declaritive and flexible fashion. Originally, workflows in Prefect are defined via decorators such as @flow and @task. In kube_watch, workflows can be defined in a declaritive form via yaml files. The library is mainly focused on running scheduled workflows in kubernetes environment. However, it can easily be extended to be used for any purpose requiring a workflow. The workflow manifest has the following generic structure:
+
+ ```
+ workflow:
+   name: Dummy Workflow
+   runner: concurrent
+   tasks:
+     - name: Task_A
+       module: <module_path>
+       task: <func_name>
+       inputsArgType: arg
+       inputs:
+         parameters:
+           - name: x1
+             value: y1
+           - name: x2
+             value: y2
+           - name: x3
+             type: env
+             value: Y3
+
+     - name: Task_B
+       module: <module_path>
+       task: <func_name>
+       inputsArgType: arg
+       inputs:
+         parameters:
+           - name: xx1
+             value: yy1
+           - name: xx2
+             value: yy2
+       dependency:
+         - taskName: Task_A
+           inputParamName: xx3
+
+     - name: Task_C
+       module: <module_path>
+       task: <func_name>
+       inputsArgType: arg
+       conditional:
+         tasks: ["Task_B"]
+ ```
+
+
+ **runner**: concurrent | sequential: if concurrent selected, tasks will be run concurrently.
+
+ **module**: all modules are located in 'modules' directory in kube_watch. This is where you can extend the library and add new tasks / modules. Below modules, there are submodules such as providers, clusters, and logic. Within each of this submodules, specific modules are defined. For example: providers.aws contains a series of tasks related to AWS. In this case, <module_path> = providers.aws. To add new tasks, add a new module with a similar pattern and refer the path in your task block.
+
+ **task**: task is simply the name function that you put in the <module_path>. i.e. as you define a function in a module, you can simply start to use it in your manifests.
+
+ **inputArgType**: arg | dict | list: if the task functions accept known-fixed number of parameters, then use arg.
+
+ **dependency**: this block defines dependency of a child task to its parent. If **inputParamName** is defined, then OUTPUT of the parent task is passed to the child with an argument name defined by inputParamName.
+
+ **IMPORTATN NOTE**: A strict assumption is that task functions return a single output. If there are cases with multiple output, wrap them into a dictionary and unwrap them in the child task.
+
+ **conditional**: These are blocks where you can define when a task runs depending on the outcome of its parent. The parent task should return True or False.
+
+
+ Parameters have also a type entry: env | static. static is default value. If type is defined as env, the parameter value is loaded from Environment Variables. In this case, value should be the name of the corresponding env var.
+
+ In above examples:
+
+ ```
+ def Task_A(x1, x2, x3):
+     # do something
+     return output_A
+
+ def Task_B(xx1, xx2, xx3):
+     # do something else
+     return output_B
+
+ def Task_C():
+     # do another thing
+     return output_C
+ ```
+
+
+
+ # Batch workflows
+ kube_watch also enables to run workflows in batch. A separate manifest with following form is required:
+
+ batchFlows:
+   runner: sequential
+   items:
+     - path: path_to_flow_A.yaml
+     - path: path_to_flow_B.yaml
+     - path: path_to_flow_C.yaml
+
+
+ # cron_app
+ The cron_app folder contains an example use case of kube_watch library. The cron_app can be used to deploy a CronJob in a kubernetes environment. The app assumes the manifests are located in a separate repository. It will clone the repo and read the manifests and runs the workflows.
+
+ # Connect to a server
+ ## Start Server
+ `prefect server start`
+ ## To Connect
+ To connect to a server, simply set the following environment variable: `PREFECT_API_URL`

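The README carried over above (identical on both sides of the hunk) describes how a manifest entry resolves to a function under kube_watch/modules. A minimal sketch of such a custom task module follows; the file path, function names and parameters are hypothetical and not taken from the package, they only illustrate the pattern the README describes.

```
# Hypothetical example, e.g. kube_watch/modules/providers/mycloud.py (path is an assumption)


def list_buckets(region, prefix, api_token):
    """Would be referenced in a manifest as:
        module: providers.mycloud
        task: list_buckets
    with parameters named region, prefix and api_token (inputsArgType: arg).
    A parameter declared with `type: env` (e.g. value: MYCLOUD_TOKEN) would be
    resolved from an environment variable before the task runs.
    """
    # Single return value, as the README's note requires; bundle multiple
    # results into one dict and unwrap them in the child task.
    return {"region": region, "buckets": [f"{prefix}-data", f"{prefix}-logs"]}


def has_new_data(parent_result):
    """A parent referenced in a `conditional:` block should return True or False."""
    return bool(parent_result["buckets"])
```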
{kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/enums/kube.py
@@ -1,6 +1,6 @@
- from enum import Enum
-
-
- class Hosts(str, Enum):
-     LOCAL = 'local'
+ from enum import Enum
+
+
+ class Hosts(str, Enum):
+     LOCAL = 'local'
      REMOTE = 'remote'

{kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/enums/logic.py
@@ -1,9 +1,9 @@
- from enum import Enum
-
- class Operations(str, Enum):
-     OR = 'or'
-     AND = 'and'
-     SUM = 'sum'
-     AVG = 'avg'
-     MAX = 'max'
+ from enum import Enum
+
+ class Operations(str, Enum):
+     OR = 'or'
+     AND = 'and'
+     SUM = 'sum'
+     AVG = 'avg'
+     MAX = 'max'
      MIN = 'min'

{kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/enums/providers.py
@@ -1,13 +1,13 @@
- from enum import Enum
-
- class Providers(str, Enum):
-     AWS = "aws"
-     AZURE = "azure"
-     GCP = "gcp"
-     VAULT = "vault"
-
-
- class AwsResources(str, Enum):
-     ECR = "ecr" # elastic container registry
-     S3 = "s3"
+ from enum import Enum
+
+ class Providers(str, Enum):
+     AWS = "aws"
+     AZURE = "azure"
+     GCP = "gcp"
+     VAULT = "vault"
+
+
+ class AwsResources(str, Enum):
+     ECR = "ecr" # elastic container registry
+     S3 = "s3"
      IAM = "iam"

{kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/enums/workflow.py
@@ -1,18 +1,18 @@
- from enum import Enum
-
- class ParameterType(str, Enum):
-     STATIC = 'static'
-     FROM_ENV = 'env'
-     FROM_FLOW = 'flow'
-
-
- class TaskRunners(str, Enum):
-     SEQUENTIAL = 'sequential'
-     CONCURRENT = 'concurrent'
-     DASK = 'dask'
-     RAY = 'ray'
-
-
- class TaskInputsType(str, Enum):
-     ARG = 'arg'
+ from enum import Enum
+
+ class ParameterType(str, Enum):
+     STATIC = 'static'
+     FROM_ENV = 'env'
+     FROM_FLOW = 'flow'
+
+
+ class TaskRunners(str, Enum):
+     SEQUENTIAL = 'sequential'
+     CONCURRENT = 'concurrent'
+     DASK = 'dask'
+     RAY = 'ray'
+
+
+ class TaskInputsType(str, Enum):
+     ARG = 'arg'
      DICT = 'dict'

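For context, a small sketch of why the plain strings used in the YAML manifests ("concurrent", "arg", "env", ...) validate against the enums in the hunk above: they are str-valued Enums, so members can be looked up directly by value. The import path follows the file listed above.

```
from kube_watch.enums.workflow import ParameterType, TaskRunners, TaskInputsType

# Enum lookup by value maps manifest strings onto members
assert TaskRunners("concurrent") is TaskRunners.CONCURRENT
assert TaskInputsType("arg") is TaskInputsType.ARG
assert ParameterType("env") is ParameterType.FROM_ENV

# Because the classes also inherit from str, members compare equal to raw strings
assert TaskRunners.SEQUENTIAL == "sequential"
```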
{kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/models/common.py
@@ -1,17 +1,17 @@
- from pydantic import BaseModel, ConfigDict
- from humps.camel import case
-
- def to_camel(string):
-     if string == "id":
-         return "_id"
-     if string.startswith("_"): # "_id"
-         return string
-     return case(string)
-
- class CamelModel(BaseModel):
-     """
-     Replacement for pydanitc BaseModel which simply adds a camel case alias to every field
-     NOTE: This has been updated for Pydantic 2 to remove some common encoding helpers
-     """
-
+ from pydantic import BaseModel, ConfigDict
+ from humps.camel import case
+
+ def to_camel(string):
+     if string == "id":
+         return "_id"
+     if string.startswith("_"): # "_id"
+         return string
+     return case(string)
+
+ class CamelModel(BaseModel):
+     """
+     Replacement for pydanitc BaseModel which simply adds a camel case alias to every field
+     NOTE: This has been updated for Pydantic 2 to remove some common encoding helpers
+     """
+
      model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True)

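A minimal usage sketch for the CamelModel shown above: the alias generator exposes each field under a camelCase alias (the form the YAML manifests use), while populate_by_name=True still accepts the Python field names. The ReportConfig subclass and its fields are hypothetical; only CamelModel and its configuration come from the code in the hunk.

```
from kube_watch.models.common import CamelModel


class ReportConfig(CamelModel):   # hypothetical subclass for illustration
    bucket_name: str              # expected alias: "bucketName"
    max_retries: int = 3          # expected alias: "maxRetries"


# Camel-case keys, as they would appear in a manifest, populate the model via aliases
cfg = ReportConfig.model_validate({"bucketName": "logs", "maxRetries": 5})
assert cfg.bucket_name == "logs"

# Dumping by alias round-trips back to the camelCase form
print(cfg.model_dump(by_alias=True))   # expected: {'bucketName': 'logs', 'maxRetries': 5}
```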
{kubernetes_watch-0.1.5 → kubernetes_watch-0.1.9}/kube_watch/models/workflow.py
@@ -1,60 +1,60 @@
- from typing import List, Optional, Dict, Any
- from kube_watch.enums.workflow import ParameterType, TaskRunners, TaskInputsType
- from kube_watch.enums.logic import Operations
-
- from .common import CamelModel
-
- class Parameter(CamelModel):
-     name: str
-     value: Any
-     type: Optional[ParameterType] = ParameterType.STATIC
-
- class Artifact(CamelModel):
-     path: str
-
- class Inputs(CamelModel):
-     parameters: Optional[List[Parameter]] = []
-     artifacts: Optional[List[Artifact]] = []
-
- class Dependency(CamelModel):
-     taskName: str
-     inputParamName: Optional[str] = None
-
- class Condition(CamelModel):
-     tasks: List[str]
-     operation: Optional[Operations] = Operations.AND
-
- class Task(CamelModel):
-     """
-     :param plugin_path: define if referring to an external module outside the library.
-     """
-     module: str
-     task: str
-     name: str
-     plugin_path: Optional[str] = ""
-     inputsArgType: Optional[TaskInputsType] = TaskInputsType.ARG # @TODO refactor inputsArgType to inputs_arg_type
-     inputs: Optional[Inputs] = None
-     dependency: Optional[List[Dependency]] = None
-     conditional: Optional[Condition] = None
-     outputs: Optional[List[str]] = None
-
- class WorkflowConfig(CamelModel):
-     name: str
-     runner: TaskRunners = TaskRunners.CONCURRENT
-     parameters: Optional[List[Parameter]] = []
-     tasks: List[Task]
-
- class WorkflowOutput(CamelModel):
-     flow_run: Any
-     config: Any
-
- class BatchFlowItem(CamelModel):
-     path: str
-
- class BatchFlowConfig(CamelModel):
-     # Only possible runners are concurrent and sequential
-     runner: TaskRunners = TaskRunners.CONCURRENT
-     items: List[BatchFlowItem]
-
-
-
+ from typing import List, Optional, Dict, Any
+ from kube_watch.enums.workflow import ParameterType, TaskRunners, TaskInputsType
+ from kube_watch.enums.logic import Operations
+
+ from .common import CamelModel
+
+ class Parameter(CamelModel):
+     name: str
+     value: Any
+     type: Optional[ParameterType] = ParameterType.STATIC
+
+ class Artifact(CamelModel):
+     path: str
+
+ class Inputs(CamelModel):
+     parameters: Optional[List[Parameter]] = []
+     artifacts: Optional[List[Artifact]] = []
+
+ class Dependency(CamelModel):
+     taskName: str
+     inputParamName: Optional[str] = None
+
+ class Condition(CamelModel):
+     tasks: List[str]
+     operation: Optional[Operations] = Operations.AND
+
+ class Task(CamelModel):
+     """
+     :param plugin_path: define if referring to an external module outside the library.
+     """
+     module: str
+     task: str
+     name: str
+     plugin_path: Optional[str] = ""
+     inputsArgType: Optional[TaskInputsType] = TaskInputsType.ARG # @TODO refactor inputsArgType to inputs_arg_type
+     inputs: Optional[Inputs] = None
+     dependency: Optional[List[Dependency]] = None
+     conditional: Optional[Condition] = None
+     outputs: Optional[List[str]] = None
+
+ class WorkflowConfig(CamelModel):
+     name: str
+     runner: TaskRunners = TaskRunners.CONCURRENT
+     parameters: Optional[List[Parameter]] = []
+     tasks: List[Task]
+
+ class WorkflowOutput(CamelModel):
+     flow_run: Any
+     config: Any
+
+ class BatchFlowItem(CamelModel):
+     path: str
+
+ class BatchFlowConfig(CamelModel):
+     # Only possible runners are concurrent and sequential
+     runner: TaskRunners = TaskRunners.CONCURRENT
+     items: List[BatchFlowItem]
+
+
+

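These models mirror the manifest structure documented in the README hunk earlier in the diff. A sketch of validating such a manifest against them follows; the unwrapping of the top-level `workflow:` key is an assumption (the actual loader in kube_watch/watch/workflow.py is not shown in this diff), yaml (pyyaml) is used purely for the sketch, and the module/task names reuse the hypothetical providers.mycloud example from above.

```
import yaml  # pyyaml, used here only for the sketch

from kube_watch.models.workflow import WorkflowConfig

MANIFEST = """
workflow:
  name: Dummy Workflow
  runner: concurrent
  tasks:
    - name: Task_A
      module: providers.mycloud      # hypothetical module from the earlier sketch
      task: list_buckets             # hypothetical task name
      inputsArgType: arg
      inputs:
        parameters:
          - name: prefix
            value: demo
"""

data = yaml.safe_load(MANIFEST)
config = WorkflowConfig(**data["workflow"])   # assumed unwrapping of the top-level key
print(config.runner)                          # TaskRunners.CONCURRENT
print(config.tasks[0].module, config.tasks[0].task)
```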