kubernetes-watch 0.1.5__py3-none-any.whl → 0.1.9__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
@@ -1,100 +1,232 @@
- from prefect import flow, get_run_logger, runtime
- import asyncio
- from typing import List
- import secrets
- import os
- import kube_watch.watch.helpers as helpers
- from kube_watch.models.workflow import WorkflowOutput
- from kube_watch.enums.workflow import TaskRunners, TaskInputsType
-
-
- # @TODO: CONCURRENCY DOES NOT WORK PROPERLY AT FLOW LEVEL
- def create_flow_based_on_config(yaml_file, run_async=True):
-     workflow_config = helpers.load_workflow_config(yaml_file)
-     flow_name = workflow_config.name
-     runner = helpers.resolve_runner(workflow_config.runner)
-     random_suffix = secrets.token_hex(6)
-     flow_run_name = f"{flow_name} - {random_suffix}"
-
-     @flow(name=flow_name, flow_run_name=flow_run_name, task_runner=runner)
-     async def dynamic_workflow():
-         logger = get_run_logger()
-         tasks = {}
-
-         for param in workflow_config.parameters:
-             runtime.flow_run.parameters[param.name] = param.value
-
-         logger.info(f"Starting flow: {flow_name}")
-         for task_data in workflow_config.tasks:
-             task_name = task_data.name
-             func = helpers.get_task_function(task_data.module, task_data.task, task_data.plugin_path)
-             task_inputs = helpers.prepare_task_inputs(task_data.inputs.parameters) if task_data.inputs else {}
-
-             condition_result = True
-             if task_data.conditional:
-                 condition_result = helpers.resolve_conditional(task_data, tasks)
-
-             if condition_result:
-                 # Resolve dependencies only if the task is going to be executed
-                 if task_data.dependency:
-                     task_inputs = helpers.prepare_task_inputs_from_dep(task_data, task_inputs, tasks)
-
-                 task_future = helpers.submit_task(task_name, task_data, task_inputs, func)
-                 tasks[task_data.name] = task_future
-
-
-         return tasks
-     return dynamic_workflow
-
-
- # SINGLE
- def single_run_workflow(yaml_file, return_state=True) -> WorkflowOutput:
-     dynamic_flow = create_flow_based_on_config(yaml_file, run_async=False)
-     flow_run = dynamic_flow(return_state=return_state)
-     return WorkflowOutput(**{'flow_run': flow_run, 'config': dynamic_flow})
-
-
- # BATCH
-
- @flow(name="Batch Workflow Runner - Sequential")
- def batch_run_sequential(batch_config, batch_dir) -> List[WorkflowOutput]:
-     # batch_config = helpers.load_batch_config(batch_yaml_file)
-     # batch_dir = os.path.dirname(batch_yaml_file)
-     flows = []
-     for item in batch_config.items:
-         yaml_file_path = os.path.join(batch_dir, item.path)
-         output = single_run_workflow(yaml_file_path, return_state = True)
-         flows.append(output)
-
-     return flows
-
- # @TODO: CONCURRENCY DOES NOT WORK PROPERLY AT FLOW LEVEL
- @flow(name="Batch Workflow Runner - Concurrent")
- async def batch_run_concurrent(batch_config, batch_dir) -> List[WorkflowOutput]:
-     # Asynchronous flow run submissions
-     flow_runs = []
-     for item in batch_config.items:
-         yaml_file_path = os.path.join(batch_dir, item.path)
-         # Here you create flow runs but do not await them yet
-         flow_function = create_flow_based_on_config(yaml_file_path, run_async=True)
-         flow_run_future = flow_function(return_state=True) # Ensure this is submitted asynchronously
-         flow_runs.append(flow_run_future)
-         is_async = asyncio.iscoroutinefunction(flow_function)
-
-
-     # Await all flow runs to finish concurrently
-     results = await asyncio.gather(*flow_runs)
-     return [WorkflowOutput(**{'flow_run': result, 'config': flow_function}) for result, flow_function in zip(results, flow_runs)]
-
-
- def batch_run_workflow(batch_yaml_file):
-     batch_config = helpers.load_batch_config(batch_yaml_file)
-     batch_dir = os.path.dirname(batch_yaml_file)
-
-     if batch_config.runner == TaskRunners.SEQUENTIAL:
-         return batch_run_sequential(batch_config, batch_dir)
-
-     if batch_config.runner == TaskRunners.CONCURRENT:
-         return asyncio.run(batch_run_concurrent(batch_config, batch_dir))
-
-     raise ValueError('Invalid flow runner type')
+ from prefect import flow, get_run_logger, runtime
+ import asyncio
+ from typing import List
+ import secrets
+ import os
+ import kube_watch.watch.helpers as helpers
+ from kube_watch.models.workflow import WorkflowOutput
+ from kube_watch.enums.workflow import TaskRunners, TaskInputsType
+
+
+ # @TODO: CONCURRENCY DOES NOT WORK PROPERLY AT FLOW LEVEL
+ def create_flow_based_on_config(yaml_file, run_async=True):
+     workflow_config = helpers.load_workflow_config(yaml_file)
+     flow_name = workflow_config.name
+     runner = helpers.resolve_runner(workflow_config.runner)
+     random_suffix = secrets.token_hex(6)
+     flow_run_name = f"{flow_name} - {random_suffix}"
+
+     @flow(name=flow_name, flow_run_name=flow_run_name, task_runner=runner)
+     async def dynamic_workflow():
+         logger = get_run_logger()
+         tasks = {}
+
+         for param in workflow_config.parameters:
+             runtime.flow_run.parameters[param.name] = param.value
+
+         logger.info(f"Starting flow: {flow_name}")
+         for task_data in workflow_config.tasks:
+             task_name = task_data.name
+             func = helpers.get_task_function(task_data.module, task_data.task, task_data.plugin_path)
+             task_inputs = helpers.prepare_task_inputs(task_data.inputs.parameters) if task_data.inputs else {}
+
+             condition_result = True
+             if task_data.conditional:
+                 condition_result = helpers.resolve_conditional(task_data, tasks)
+
+             if condition_result:
+                 # Resolve dependencies only if the task is going to be executed
+                 if task_data.dependency:
+                     task_inputs = helpers.prepare_task_inputs_from_dep(task_data, task_inputs, tasks)
+
+                 task_future = helpers.submit_task(task_name, task_data, task_inputs, func)
+                 tasks[task_data.name] = task_future
+
+
+         return tasks
+     return dynamic_workflow
+
+
+ # SINGLE
+ def single_run_workflow(yaml_file, return_state=True) -> WorkflowOutput:
+     dynamic_flow = create_flow_based_on_config(yaml_file, run_async=False)
+     flow_run = dynamic_flow(return_state=return_state)
+     return WorkflowOutput(**{'flow_run': flow_run, 'config': dynamic_flow})
+
+
+ async def single_run_workflow_async(yaml_file, return_state=True) -> WorkflowOutput:
+     dynamic_flow = create_flow_based_on_config(yaml_file, run_async=True)
+     flow_run = await dynamic_flow(return_state=return_state)
+     return WorkflowOutput(**{'flow_run': flow_run, 'config': dynamic_flow})
+
+
+ # BATCH
+
+ @flow(name="Batch Workflow Runner - Sequential")
+ async def batch_run_sequential(batch_config, batch_dir) -> List[WorkflowOutput]:
+     # batch_config = helpers.load_batch_config(batch_yaml_file)
+     # batch_dir = os.path.dirname(batch_yaml_file)
+     flows = []
+     for item in batch_config.items:
+         yaml_file_path = os.path.join(batch_dir, item.path)
+         output = await single_run_workflow_async(yaml_file_path, return_state = True)
+         flows.append(output)
+
+     return flows
+
+ @flow(name="Batch Workflow Runner - Concurrent")
+ async def batch_run_concurrent(batch_config, batch_dir) -> List[WorkflowOutput]:
+     """
+     Run multiple workflows concurrently within a single flow to avoid database conflicts.
+     Instead of creating separate flows, we'll execute the workflow logic directly as tasks.
+     """
+     from prefect import task
+
+     @task
+     async def execute_workflow_tasks(yaml_file_path):
+         """Execute all tasks from a workflow config as a single unit"""
+         workflow_config = helpers.load_workflow_config(yaml_file_path)
+         logger = get_run_logger()
+         tasks = {}
+
+         # Set flow parameters
+         for param in workflow_config.parameters:
+             runtime.flow_run.parameters[param.name] = param.value
+
+         logger.info(f"Processing workflow: {workflow_config.name} from {yaml_file_path}")
+
+         # Execute tasks sequentially within this task to avoid conflicts
+         for task_data in workflow_config.tasks:
+             task_name = task_data.name
+             func = helpers.get_task_function(task_data.module, task_data.task, task_data.plugin_path)
+             task_inputs = helpers.prepare_task_inputs(task_data.inputs.parameters) if task_data.inputs else {}
+
+             condition_result = True
+             if task_data.conditional:
+                 condition_result = helpers.resolve_conditional(task_data, tasks)
+
+             if condition_result:
+                 # Resolve dependencies
+                 if task_data.dependency:
+                     task_inputs = helpers.prepare_task_inputs_from_dep(task_data, task_inputs, tasks)
+
+                 # Execute the function directly instead of submitting as a separate task
+                 try:
+                     # Handle default inputsArgType if not specified
+                     inputs_arg_type = getattr(task_data, 'inputsArgType', TaskInputsType.ARG)
+                     if inputs_arg_type == TaskInputsType.ARG:
+                         result = func(**task_inputs)
+                     else: # TaskInputsType.DICT
+                         result = func(task_inputs)
+
+                     # Store result for dependencies
+                     class MockTaskResult:
+                         def __init__(self, value):
+                             self._value = value
+                         def result(self):
+                             return self._value
+
+                     tasks[task_data.name] = MockTaskResult(result)
+                     logger.info(f"Completed task: {task_name}")
+
+                 except Exception as e:
+                     logger.error(f"Task {task_name} failed: {str(e)}")
+                     raise
+
+         return {"workflow_name": workflow_config.name, "tasks_completed": len(tasks)}
+
+     # Submit all workflow executions as concurrent tasks
+     workflow_tasks = []
+     for item in batch_config.items:
+         yaml_file_path = os.path.join(batch_dir, item.path)
+         task_future = execute_workflow_tasks.submit(yaml_file_path)
+         workflow_tasks.append((task_future, yaml_file_path))
+
+     # Wait for all tasks to complete
+     results = []
+     for task_future, yaml_path in workflow_tasks:
+         try:
+             result = task_future.result() # .result() is synchronous, don't await it
+             # Create a mock WorkflowOutput for compatibility
+             workflow_output = WorkflowOutput(**{
+                 'flow_run': result,
+                 'config': {'name': result['workflow_name'], 'path': yaml_path}
+             })
+             results.append(workflow_output)
+         except Exception as e:
+             logger = get_run_logger()
+             logger.error(f"Workflow {yaml_path} failed: {str(e)}")
+             raise
+
+     return results
+
+
+ @flow(name="Batch Workflow Runner - Concurrent (Original Flow Approach)")
+ async def batch_run_concurrent_flows(batch_config, batch_dir) -> List[WorkflowOutput]:
+     """
+     Original approach: Run separate flows with staggered execution to reduce database contention.
+     This preserves the full Prefect flow semantics and task tracking.
+     Use this with PostgreSQL backend for better concurrent performance.
+
+     Advantages:
+     - Full Prefect flow semantics and UI tracking
+     - Each YAML gets its own flow run with proper task hierarchy
+     - Task names can be the same across YAML files without conflicts
+
+     Disadvantages:
+     - May cause database locking with SQLite
+     - More database writes due to separate flow runs
+     """
+     from prefect import task
+
+     @task
+     async def run_flow_with_delay(yaml_file_path, delay_seconds=0):
+         if delay_seconds > 0:
+             await asyncio.sleep(delay_seconds)
+
+         flow_function = create_flow_based_on_config(yaml_file_path, run_async=True)
+         result = await flow_function(return_state=True)
+         return WorkflowOutput(**{'flow_run': result, 'config': flow_function})
+
+     # Submit flows with staggered delays to reduce database contention
+     workflow_tasks = []
+     for i, item in enumerate(batch_config.items):
+         yaml_file_path = os.path.join(batch_dir, item.path)
+         # Add small delay between flow starts (0, 2, 4, 6 seconds...)
+         delay = i * 2
+         task_future = run_flow_with_delay.submit(yaml_file_path, delay)
+         workflow_tasks.append(task_future)
+
+     # Wait for all flows to complete
+     results = []
+     for task_future in workflow_tasks:
+         result = task_future.result()
+         results.append(result)
+
+     return results
+
+
+ def batch_run_workflow(batch_yaml_file, use_single_flow=True):
+     """
+     Run batch workflows with two different approaches:
+
+     Args:
+         batch_yaml_file: Path to the batch configuration YAML
+         use_single_flow: If True (default), uses single flow approach (better for SQLite)
+                          If False, uses separate flows with delays (better for PostgreSQL)
+     """
+     batch_config = helpers.load_batch_config(batch_yaml_file)
+     batch_dir = os.path.dirname(batch_yaml_file)
+
+     if batch_config.runner == TaskRunners.SEQUENTIAL:
+         return asyncio.run(batch_run_sequential(batch_config, batch_dir))
+
+     if batch_config.runner == TaskRunners.CONCURRENT:
+         if use_single_flow:
+             # Single flow approach - better for SQLite, tasks scoped per YAML
+             return asyncio.run(batch_run_concurrent(batch_config, batch_dir))
+         else:
+             # Separate flows approach - better for PostgreSQL, full Prefect semantics
+             return asyncio.run(batch_run_concurrent_flows(batch_config, batch_dir))
+
+     raise ValueError('Invalid flow runner type')
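
For orientation, a minimal usage sketch of the 0.1.9 entry points, assuming the diffed module is `kube_watch/watch/workflow.py` (as the RECORD entries below indicate); `workflow.yaml` and `batch.yaml` are hypothetical paths, not files shipped with the package:

```python
# Minimal usage sketch for the 0.1.9 API. "workflow.yaml" and "batch.yaml"
# are hypothetical placeholder paths.
from kube_watch.watch.workflow import batch_run_workflow, single_run_workflow

# Run one workflow config as a single Prefect flow.
output = single_run_workflow("workflow.yaml", return_state=True)

# Run a batch. use_single_flow=True (default) executes every workflow's
# tasks inside one flow, which the docstrings recommend for SQLite;
# use_single_flow=False launches one staggered flow per YAML, which they
# recommend for a PostgreSQL backend.
results = batch_run_workflow("batch.yaml", use_single_flow=True)
```

Note that `batch_run_sequential` is now an async flow awaited via `asyncio.run` inside `batch_run_workflow`, so callers no longer need to distinguish the sequential and concurrent paths themselves.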
@@ -1,21 +1,21 @@
- MIT License
-
- Copyright (c) 2024 Benyamin Motevalli
-
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to deal
- in the Software without restriction, including without limitation the rights
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in all
- copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- SOFTWARE.
+ MIT License
+
+ Copyright (c) 2024 Benyamin Motevalli
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -1,10 +1,10 @@
  Metadata-Version: 2.1
  Name: kubernetes-watch
- Version: 0.1.5
+ Version: 0.1.9
  Summary:
  Author: bmotevalli
  Author-email: b.motevalli@gmail.com
- Requires-Python: >=3.10,<4
+ Requires-Python: >=3.10,<3.14
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
@@ -15,7 +15,9 @@ Requires-Dist: boto3 (>=1.34.68,<2.0.0)
  Requires-Dist: humps (>=0.2.2,<0.3.0)
  Requires-Dist: hvac (>=2.1.0,<3.0.0)
  Requires-Dist: kubernetes (>=29.0.0,<30.0.0)
- Requires-Dist: prefect (>=2.18.0,<3.0.0)
+ Requires-Dist: prefect (>=3.4.17,<4.0.0)
+ Requires-Dist: psycopg2 (>=2.9.10,<3.0.0)
+ Requires-Dist: pydantic (>=2.11.7,<3.0.0)
  Requires-Dist: requests (>=2.32.3,<3.0.0)
  Description-Content-Type: text/markdown

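The dependency footprint changes materially in 0.1.9: Prefect moves from the 2.x line to >=3.4.17, Python is capped below 3.14, and psycopg2 and pydantic become explicit requirements. A small pre-upgrade environment check, sketched with the version bounds copied from the METADATA above (nothing else is assumed):

```python
# Sketch of a pre-upgrade check; bounds are copied from the 0.1.9 METADATA.
import sys
from importlib.metadata import version, PackageNotFoundError

assert (3, 10) <= sys.version_info[:2] < (3, 14), \
    "kubernetes-watch 0.1.9 requires Python >=3.10,<3.14"

for dist, minimum in [("prefect", "3.4.17"), ("psycopg2", "2.9.10"), ("pydantic", "2.11.7")]:
    try:
        print(f"{dist} {version(dist)} installed (0.1.9 wants >={minimum})")
    except PackageNotFoundError:
        print(f"{dist} missing; required by kubernetes-watch 0.1.9")
```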
@@ -0,0 +1,36 @@
+ kube_watch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ kube_watch/enums/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ kube_watch/enums/kube.py,sha256=rVKHMHAMkamsmmba7m4jhKlC131hIjZJW0U-G9MBiQE,89
+ kube_watch/enums/logic.py,sha256=JRloXghE-gIUpZF7CT7TvYdxjYwc-KAGwTN0HDWPCA0,146
+ kube_watch/enums/providers.py,sha256=wjiwmJe0Ph3My_HirKaK9k8VEPYjtxtlqEEY-wSdNUc,236
+ kube_watch/enums/workflow.py,sha256=lobyV34tTS_xAgf6VnzaSIjE9DK0IZUKQyHZ2FPtPNg,328
+ kube_watch/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ kube_watch/models/common.py,sha256=FQktpX552zSCigMxEzm4S07SvrHv5RA7YwVJHgv7uuI,527
+ kube_watch/models/workflow.py,sha256=ZFBMz_LmYgROcbz2amSvms38K770njnyZC6h1bpTXGU,1634
+ kube_watch/modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ kube_watch/modules/clusters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ kube_watch/modules/clusters/kube.py,sha256=_skDVeKIQcZ0nC9JLY0oaFmaeEVRHS-Xbrxh1HR2t9Y,7582
+ kube_watch/modules/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ kube_watch/modules/database/model.py,sha256=MWG9UW6g0KuBzW6MjzPBtknAk7GmuncQrdAq6HHarTo,207
+ kube_watch/modules/database/postgre.py,sha256=1Sq2YFwgCJM_FWKabV2S1bFAIl2GBwytTtPCuLfVhu8,8182
+ kube_watch/modules/logic/actions.py,sha256=hMvqqzR2EzcZ68_O8GdyLaPSLftA-tWAPaJnKdUMj-k,2416
+ kube_watch/modules/logic/checks.py,sha256=2q4iNi3nOtGU_CMPIX_j3XY0krzE12fCZ_lAYifcJcA,150
+ kube_watch/modules/logic/load.py,sha256=XC-SsWIChhW-QXJeCGMsLuLsn9v5nRni5Y6utwYLt48,781
+ kube_watch/modules/logic/merge.py,sha256=sRKm4aTYfNFZnrXceGRWdGPDyoaMcTp6XhAkazckpPU,895
+ kube_watch/modules/logic/scheduler.py,sha256=fwUfj4hnYX3yCj6fdZyLvlJE8RTBWHnHOwJPrpNJ8hw,3443
+ kube_watch/modules/logic/trasnform.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ kube_watch/modules/mock/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ kube_watch/modules/mock/mock_generator.py,sha256=BKKQFCxxQgFW_GFgeIbkyIbuNU4328xTTaFfTwFLsS8,1262
+ kube_watch/modules/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ kube_watch/modules/providers/aws.py,sha256=oly88Bihn-vR0NdOoCT-P1MNgPw8FLNfS1Ha4hc_4bc,8375
+ kube_watch/modules/providers/git.py,sha256=BevjDv8czDJOJ9M1x8WYIRcXCxtZKow6ZQYiD6MCtGs,4104
+ kube_watch/modules/providers/github.py,sha256=eQY8sLy2U6bOWMpFxA73DFCPVuswhTXSG25KmYSuo5s,5212
+ kube_watch/modules/providers/vault.py,sha256=etzzHbTrUDsTUpeUN-xg0Xh8ulqC0-1FA3tHRZinIOo,7193
+ kube_watch/standalone/metarecogen/ckan_to_gn.py,sha256=LWd7ikyxRIC1IGt6CtALnDOEoyuG07a8NoDHhgMkX4o,4635
+ kube_watch/watch/__init__.py,sha256=9KE0Sf1nLUTNaFvXbiQCgf11vpG8Xgmb5ddeMAmak3Q,88
+ kube_watch/watch/helpers.py,sha256=8BQnQ6AeLHs0JEq54iKYDvWURb1F-kROJxwIcl_nv_Y,6276
+ kube_watch/watch/workflow.py,sha256=CaXHFuEWVsFjBv5dU4IfVMeTlGJWyKaE1But9-YzVWk,9769
+ kubernetes_watch-0.1.9.dist-info/LICENSE,sha256=_H2QdL-2dXbivDmOpJ11DnqJewSFhSJwGpHx_WAE-CA,1075
+ kubernetes_watch-0.1.9.dist-info/METADATA,sha256=isBj78h77s8CXFHssio69S4RgLmb_sU0_cfYQUfSO5I,5056
+ kubernetes_watch-0.1.9.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ kubernetes_watch-0.1.9.dist-info/RECORD,,
@@ -1,33 +0,0 @@
- kube_watch/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- kube_watch/enums/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- kube_watch/enums/kube.py,sha256=z6ceQwHV9-LB1bMitaMeX8kBXt2GCVdVDFAwc6DQffo,94
- kube_watch/enums/logic.py,sha256=Dzh24fZpYahIt0YWpXe1_4FIoJNlwwgYOCnwEAjo8Uk,154
- kube_watch/enums/providers.py,sha256=nMX-hXqhgLJMFmC5nmMy8Ajnr7tiya3B5NWn57EMcxk,248
- kube_watch/enums/workflow.py,sha256=W4EI98Kwh2Ptbzn4KgRMtTnulyKYxICRPgxo4g7wTjU,345
- kube_watch/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- kube_watch/models/common.py,sha256=mECTcFVLEitlN38l8tqP-BI1kwYev3Jxq0kDSH5pE0o,543
- kube_watch/models/workflow.py,sha256=WE-ArxyaJfmze6gRvmwHYKQu7TpQBzxy88szqJO4Xxc,1694
- kube_watch/modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- kube_watch/modules/clusters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- kube_watch/modules/clusters/kube.py,sha256=Fje6-vUA1KQ4x8T6cUYJT_eVwUYw-dR71h95ixSLqLM,7767
- kube_watch/modules/logic/actions.py,sha256=lt7OkSw6m2ZQe2SfENUsjZeD8vdpvfoE4laEkv9veEA,2471
- kube_watch/modules/logic/checks.py,sha256=CFIMVURKJP5Y3mByyJkFCrJBlVUjTG2XixiwoRquXN4,157
- kube_watch/modules/logic/load.py,sha256=8b5Y_iRTB6c7LY1-9wQjOrl_oW4JyVp5bjwh169frkU,804
- kube_watch/modules/logic/merge.py,sha256=vwc2TwcGU-vH5W0bFXzAzOMHt36ksdS4if1c4IbTeXs,926
- kube_watch/modules/logic/scheduler.py,sha256=-p5qh3FnEQ1jlkaY0Lrj9U-vau1b07NYAXBP6M09yoU,3517
- kube_watch/modules/logic/trasnform.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- kube_watch/modules/mock/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- kube_watch/modules/mock/mock_generator.py,sha256=j8UfcJeA9giEEyqH9Sf3RGtlMfGO13NbWMZ80dj4UtE,1315
- kube_watch/modules/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- kube_watch/modules/providers/aws.py,sha256=yvxVwL7seuvxpGR2ZCrmWEMKh9hesWdPTC6LvW7Bi9E,8585
- kube_watch/modules/providers/git.py,sha256=h3rcn1FhU82nF52Ol9YHyFk4cvPxxaz_AxHnip8OXPY,1183
- kube_watch/modules/providers/github.py,sha256=WCpZIKHr4U0a4El1leXkaCv1jznf9ob5xHVeTNSpNG0,5338
- kube_watch/modules/providers/vault.py,sha256=mPSjI5p1ycwXl6XFQNLohJ1rK_z_iT3QA9RJB1O3cpI,7381
- kube_watch/standalone/metarecogen/ckan_to_gn.py,sha256=FBiv6McWh4hqV6Bz08zGLzEIe4v1-D3FawjBKYbV7Ms,4767
- kube_watch/watch/__init__.py,sha256=6Ay9P_Ws7rP7ZaIrFRZtp_1uwVK4ZDmkkNhFyqPNQIU,61
- kube_watch/watch/helpers.py,sha256=T0xDSCfrW7NrmQzgIzOiojQzu_HesajMb7S_AX-tt98,6431
- kube_watch/watch/workflow.py,sha256=h0b_P_kfiPxqTFHZ6o2HkDkNaUBOwv1DKJnwEMMVXaI,4203
- kubernetes_watch-0.1.5.dist-info/LICENSE,sha256=StyinJRmy--Pc2vQbRToZSN4sjSVg3zccMFrktVcrEw,1096
- kubernetes_watch-0.1.5.dist-info/METADATA,sha256=GiDXVvgzxWV6zfK3VeEgqXTzRKSAKHan0Y4HML37Br0,4969
- kubernetes_watch-0.1.5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- kubernetes_watch-0.1.5.dist-info/RECORD,,
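
For reference, each RECORD line above has the form `path,sha256=<digest>,<size>`, where the digest is an unpadded urlsafe-base64 SHA-256 per the wheel spec. A sketch for recomputing one entry from an unpacked wheel (the local path is illustrative):

```python
# Sketch: recompute a RECORD digest for a file extracted from the wheel.
# "kube_watch/watch/workflow.py" is illustrative; point it at a real file.
import base64
import hashlib
from pathlib import Path

def record_digest(path: Path) -> str:
    raw = hashlib.sha256(path.read_bytes()).digest()
    # RECORD stores urlsafe base64 with the trailing '=' padding stripped.
    return base64.urlsafe_b64encode(raw).rstrip(b"=").decode("ascii")

# For 0.1.9 this should print CaXHFuEWVsFjBv5dU4IfVMeTlGJWyKaE1But9-YzVWk
print(record_digest(Path("kube_watch/watch/workflow.py")))
```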