kubernetes-watch 0.1.2-py3-none-any.whl → 0.1.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,6 +3,7 @@ from enum import Enum
 class ParameterType(str, Enum):
     STATIC = 'static'
     FROM_ENV = 'env'
+    FROM_FLOW = 'flow'


 class TaskRunners(str, Enum):
@@ -25,10 +25,14 @@ class Condition(CamelModel):
     operation: Optional[Operations] = Operations.AND

 class Task(CamelModel):
+    """
+    :param plugin_path: set when the task refers to an external module outside the library.
+    """
     module: str
     task: str
     name: str
-    inputsArgType: Optional[TaskInputsType] = TaskInputsType.ARG
+    plugin_path: Optional[str] = ""
+    inputsArgType: Optional[TaskInputsType] = TaskInputsType.ARG  # @TODO refactor inputsArgType to inputs_arg_type
     inputs: Optional[Inputs] = None
     dependency: Optional[List[Dependency]] = None
     conditional: Optional[Condition] = None
@@ -37,6 +41,7 @@ class Task(CamelModel):
 class WorkflowConfig(CamelModel):
     name: str
     runner: TaskRunners = TaskRunners.CONCURRENT
+    parameters: Optional[List[Parameter]] = []
     tasks: List[Task]

 class WorkflowOutput(CamelModel):
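
For orientation, a hedged sketch of how the new fields might surface in a workflow manifest. The field names (`plugin_path`, `parameters`) come from the models above; the camelCase spelling assumes CamelModel aliasing, and the module, function, and path values are invented placeholders rather than anything shipped with the package:

# Hypothetical fragments, written as Python dicts purely for illustration.
task_entry = {
    "module": "custom.cleanup",               # dotted path, resolved under the plugin directory when pluginPath is set
    "task": "purge_temp_files",               # function to load from that module
    "name": "purge-temp-files",
    "pluginPath": "/opt/kube_watch_plugins",  # new in 0.1.4: load the module from outside the library
}
workflow_level_parameters = [
    # Injected into the Prefect flow-run parameters when the dynamic workflow starts,
    # where task inputs of type 'flow' (ParameterType.FROM_FLOW) can read them back.
    {"name": "WORK_DIR", "value": "/tmp/work"},
]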
@@ -1,5 +1,6 @@
 import subprocess
 import os
+from typing import List
 from prefect import get_run_logger
 logger = get_run_logger()

@@ -8,7 +9,6 @@ def run_standalone_script(package_name, package_run, package_exec):
     # script_path = os.path.join(script_dir, package_name.replace('.', os.sep))
     target_dir = os.path.join(script_dir, os.pardir, os.pardir, *package_name.split('.'))

-    # Change the current working directory to the script directory
     full_command = f"{package_run} {os.path.join(target_dir, package_exec)}"

     # Execute the command
@@ -23,4 +23,34 @@ def run_standalone_script(package_name, package_run, package_exec):
     except subprocess.CalledProcessError as e:
         # All logs should have already been handled above, now just raise an exception
         logger.error("The subprocess encountered an error: %s", e)
-        raise Exception("Subprocess failed with exit code {}".format(e.returncode))
+        raise Exception("Subprocess failed with exit code {}".format(e.returncode))
+
+
+def run_standalone_script_modified(base_path: str, package_name: str, package_run_cmds: List[str]):
+    # Construct the absolute path to the target directory
+    target_dir = os.path.join(base_path, *package_name.split('.'))
+
+    commands = [f"cd {target_dir}"] + package_run_cmds
+    full_command = " && ".join(commands)
+
+    # full_command = f"cd {target_dir} && {package_run_cmd}"
+
+    # Build the full command to execute
+    # full_command = f"{package_run} {os.path.join(target_dir, package_exec)}"
+
+    # print(full_command)
+
+    # Execute the command
+    try:
+        result = subprocess.run(full_command, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+        if result.stdout:
+            logger.info(result.stdout)
+        if result.stderr:
+            logger.error(result.stderr)
+        result.check_returncode()
+    except subprocess.CalledProcessError as e:
+        logger.error("Command failed with exit code %s", e.returncode)
+        logger.error("Output:\n%s", e.stdout)
+        logger.error("Errors:\n%s", e.stderr)
+        raise Exception(f"Subprocess failed with exit code {e.returncode}. Check logs for more details.")
+        # raise Exception(f"Subprocess failed with exit code {e.returncode}")
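
A hedged usage sketch for the new `run_standalone_script_modified`: it joins a `cd` into the resolved package directory with the supplied commands using `&&` and runs them in one shell. The base path and command list below are placeholders, not values shipped with the package:

# Hypothetical call; the path and commands are illustrative only.
run_standalone_script_modified(
    base_path="/usr/app/src",
    package_name="kube_watch.standalone.metarecogen",  # dots become directories under base_path
    package_run_cmds=["python3 ckan_to_gn.py"],        # each entry is chained with ' && ' after the cd
)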
@@ -1,13 +1,42 @@
 import time
 import random
+from prefect import runtime

 def generate_number():
     return 42

+
+def mock_dict_data():
+    return {
+        "key1": "value1",
+        "key2": "value2",
+        "key3": {
+            "k1": 1, "k2": 2, "k3": [1, 3, "ali"]
+        }
+    }
+
+def print_input(params):
+    print(params)
+
 def print_number(number, dummy_param, env_var_name):
     print(f"The generated number is: {number} and the dummy_value is: {dummy_param}")
     return number, dummy_param, env_var_name

+
+def print_flow_parameters():
+    assert runtime.flow_run.parameters.get("WORK_DIR") is not None
+    assert runtime.flow_run.parameters.get("MODULE_PATH") is not None
+    print(runtime.flow_run.parameters.get("WORK_DIR"))
+    print(runtime.flow_run.parameters.get("MODULE_PATH"))
+
+def print_from_flow_parameters(work_dir):
+    """
+    work_dir is provided via flow parameters
+    """
+    assert work_dir != ''
+    print(work_dir)
+
+
 def delay(seconds):
     time.sleep(seconds)

@@ -1,11 +1,10 @@
 #========================================================================
 # This class is deprecated. Please refer to aws.py
 #========================================================================
-import boto3
-import base64
 import json
-
+import base64
 from datetime import datetime , timezone, timedelta
+import boto3
 from botocore.exceptions import ClientError
 from prefect import get_run_logger
 from kube_watch.enums.providers import AwsResources
@@ -77,7 +76,17 @@ def task_get_latest_image_digest(session, resource, region, repository_name, tag
 #========================================================================================
 # IAM Cred update
 #========================================================================================
-def task_rotate_iam_creds(session, user_name, old_access_key_id, old_access_key_secret, access_key_id_var_name, access_secret_key_var_name, rotate_interval):
+def task_rotate_iam_creds(
+    session,
+    user_name,
+    old_access_key_id,
+    old_access_key_secret,
+    access_key_id_var_name,
+    access_secret_key_var_name,
+    rotate_interval,
+    require_smtp_conversion = False,
+    ses_region = "ap-southeast-2"
+):
     iam = session.client('iam')
     creation_date = None

@@ -102,14 +111,15 @@ def task_rotate_iam_creds(session, user_name, old_access_key_

     curr_date = datetime.now(timezone.utc)
     # Check if the key needs rotation
-    if (curr_date.weekday() == 5 and
-        curr_date - creation_date > timedelta(days=dd,hours=hh,minutes=mm)):
+    if (curr_date - creation_date > timedelta(days=dd,hours=hh,minutes=mm)):
         logger.info("Key is older than rotation period, rotating now.")
         # Delete the old key
         delete_iam_user_key(session, user_name, old_access_key_id)

         # Create a new access key
         access_key_id, secret_access_key = create_iam_user_key(session, user_name)
+        if require_smtp_conversion:
+            secret_access_key = convert_to_smtp_password(secret_access_key, ses_region)
         return {access_key_id_var_name: access_key_id, access_secret_key_var_name: secret_access_key}

     else:
@@ -117,9 +127,9 @@ def task_rotate_iam_creds(session, user_name, old_access_key_
         return {access_key_id_var_name: old_access_key_id, access_secret_key_var_name: old_access_key_secret}


+
 def create_iam_user_key(session, user_name):
     iam = session.client('iam')
-
     # Check if the user exists
     try:
         iam.get_user(UserName=user_name)
@@ -152,3 +162,50 @@ def delete_iam_user_key(session, user_name, access_key_id):
     except Exception as e:
         raise Exception(f"Failed to delete old key: {e}")

+
+
+def convert_to_smtp_password(secret_access_key, region):
+    """Convert IAM Secret Key to SMTP Password."""
+    import hmac
+    import hashlib
+    import base64
+
+    SMTP_REGIONS = [
+        'us-east-2',       # US East (Ohio)
+        'us-east-1',       # US East (N. Virginia)
+        'us-west-2',       # US West (Oregon)
+        'ap-south-1',      # Asia Pacific (Mumbai)
+        'ap-northeast-2',  # Asia Pacific (Seoul)
+        'ap-southeast-1',  # Asia Pacific (Singapore)
+        'ap-southeast-2',  # Asia Pacific (Sydney)
+        'ap-northeast-1',  # Asia Pacific (Tokyo)
+        'ca-central-1',    # Canada (Central)
+        'eu-central-1',    # Europe (Frankfurt)
+        'eu-west-1',       # Europe (Ireland)
+        'eu-west-2',       # Europe (London)
+        'sa-east-1',       # South America (Sao Paulo)
+        'us-gov-west-1',   # AWS GovCloud (US)
+    ]
+
+    # These values are required to calculate the signature. Do not change them.
+    DATE = "11111111"
+    SERVICE = "ses"
+    MESSAGE = "SendRawEmail"
+    TERMINAL = "aws4_request"
+    VERSION = 0x04
+
+
+    def sign(key, msg):
+        return hmac.new(key, msg.encode('utf-8'), hashlib.sha256).digest()
+
+    if region not in SMTP_REGIONS:
+        raise ValueError(f"The {region} Region doesn't have an SMTP endpoint.")
+
+    signature = sign(("AWS4" + secret_access_key).encode('utf-8'), DATE)
+    signature = sign(signature, region)
+    signature = sign(signature, SERVICE)
+    signature = sign(signature, TERMINAL)
+    signature = sign(signature, MESSAGE)
+    signature_and_version = bytes([VERSION]) + signature
+    smtp_password = base64.b64encode(signature_and_version)
+    return smtp_password.decode('utf-8')
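
`convert_to_smtp_password` mirrors the HMAC-SHA256 signing chain that AWS documents for deriving SES SMTP credentials from an IAM secret key. A brief usage sketch, using the example key from the AWS documentation rather than a real secret:

# Illustrative only; never hard-code real credentials.
example_secret = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"  # AWS documentation example key
smtp_password = convert_to_smtp_password(example_secret, "ap-southeast-2")
# The returned string is used as the SES SMTP password, with the access key ID as the SMTP user name.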
@@ -31,7 +31,7 @@ def login(url, app_role_id, secret_id, path):
         logger.info("Authenticated with existing token.")
         return vault_client
     except hvac.exceptions.InvalidRequest as e:
-        logger.warning("Failed to authenticate with the existing token:", str(e))
+        logger.warning(f"Failed to authenticate with the existing token: {str(e)}")

     # If token is not valid or not present, authenticate with AppRole
     try:
@@ -47,7 +47,7 @@ def login(url, app_role_id, secret_id, path):

         return vault_client
     except hvac.exceptions.InvalidRequest as e:
-        logger.error("Authentication failed with provided secret_id:", str(e))
+        logger.error(f"Authentication failed with provided secret_id: {str(e)}")
         raise RuntimeError("Authentication failed: unable to log in with the provided credentials.") from e


@@ -105,9 +105,62 @@ def generate_provider_creds(vault_client, provider, backend_path, role_name):


 def generate_new_secret_id(vault_client, role_name, vault_path, env_var_name):
+    """
+    Generates new secret_id. Note an admin role is required for this.
+    """
     new_secret_response = vault_client.auth.approle.generate_secret_id(
         role_name=role_name,
         mount_point=f'approle/{vault_path}'
     )

-    return { env_var_name : new_secret_response['data']['secret_id'] }
+    return { env_var_name : new_secret_response['data']['secret_id'] }
+
+
+
+def delete_secret_id(vault_client, role_name, secret_id, vault_path):
+    """
+    Delete (revoke) a secret ID associated with a role in Vault.
+
+    Parameters:
+        vault_client (hvac.Client): An authenticated Vault client.
+        role_name (str): The name of the role the secret ID is associated with.
+        secret_id (str): The secret ID to be deleted.
+        vault_path (str): The path where the AppRole is enabled.
+    """
+    try:
+        vault_client.auth.approle.destroy_secret_id(
+            mount_point=f"approle/{vault_path}",
+            role_name=role_name,
+            secret_id=secret_id
+        )
+
+        logger.info("Secret ID successfully revoked.")
+    except hvac.exceptions.InvalidRequest as e:
+        logger.error("Failed to revoke the secret ID: %s", str(e))
+        raise RuntimeError("Failed to delete the secret ID.") from e
+
+
+def clean_secret_ids(vault_client, role_name, secret_id_env, vault_path, has_kube_secret_updated):
+    """
+    This function removes all idle secret-ids from `role_name`, except the
+    inputted `secret_id_env`.
+
+    Note: secret_id_env is a dictionary. The key, VAULT_SECRET_ID, has the secret_id value.
+    """
+    secret_id = secret_id_env.get("VAULT_SECRET_ID")
+    if has_kube_secret_updated:
+        secret_ids_path = f'auth/approle/{vault_path}/role/{role_name}/secret-id'
+        try:
+            response = vault_client.list(secret_ids_path)
+            if 'data' in response:
+                secret_ids = response['data']['keys']
+                for idx in secret_ids:
+                    if idx != secret_id:
+                        delete_secret_id(vault_client, role_name, secret_id, vault_path)
+                        logger.info(f"Revoking idle secret id for role: {role_name}")
+            else:
+                logger.info("No secrets found at this path.")
+        except hvac.exceptions.Forbidden:
+            logger.error("Access denied. Ensure your token has the correct policies to read this path.")
+        except Exception as e:
+            logger.error(f"An error occurred: {e}")
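
Taken together, the new Vault helpers support a rotate-then-revoke sequence. A hedged sketch of how they might be wired up; the Vault URL, role name, and mount path are placeholders, and the environment variables are assumed to hold the current AppRole credentials:

import os

client = login(
    url="https://vault.example.com",
    app_role_id=os.environ["VAULT_APP_ROLE_ID"],
    secret_id=os.environ["VAULT_SECRET_ID"],
    path="kube-watch",
)
new_secret = generate_new_secret_id(client, "kube-watch-role", "kube-watch", "VAULT_SECRET_ID")
# After the Kubernetes secret has been updated with new_secret, revoke the idle secret IDs:
clean_secret_ids(client, "kube-watch-role", new_secret, "kube-watch", has_kube_secret_updated=True)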
@@ -1,7 +1,7 @@
 from prefect import task
-import functools
-import asyncio
+import sys
 from prefect.task_runners import ConcurrentTaskRunner, SequentialTaskRunner
+from prefect import runtime
 # from prefect_dask.task_runners import DaskTaskRunner
 from typing import Dict, List
 import yaml
@@ -39,9 +39,9 @@ def func_task(name="default_task_name", task_input_type: TaskInputsType = TaskIn
         return execute_task
     if task_input_type == TaskInputsType.DICT:
         @task(name=name)
-        def execute_task(func, dict_inp):
+        def execute_task_dict(func, dict_inp):
             return func(dict_inp)
-        return execute_task
+        return execute_task_dict
     raise ValueError(f'Unknow Task Input Type. It should either be {TaskInputsType.ARG} or {TaskInputsType.DICT} but {task_input_type} is provided.')


@@ -51,28 +51,46 @@ def func_task(name="default_task_name", task_input_type: TaskInputsType = TaskIn



-def get_task_function(module_name, task_name):
-    # module = importlib.import_module(f"sparrow_watch.modules.{module_name}")
-    # klass = getattr(module, class_name)
-    # return getattr(klass, task_name)
+def get_task_function(module_name, task_name, plugin_path=None):
     """
     Fetch a function directly from a specified module.

     Args:
         module_name (str): The name of the module to import the function from. e.g. providers.aws
         task_name (str): The name of the function to fetch from the module.
+        plugin_path (str): set when loading from an external module outside the library.

     Returns:
         function: The function object fetched from the module.
     """
-    module = importlib.import_module(f"kube_watch.modules.{module_name}")
-    return getattr(module, task_name)
+    try:
+        if plugin_path:
+            # Temporarily prepend the plugin path to sys.path to find the module
+            module_path = os.path.join(plugin_path, *module_name.split('.')) + '.py'
+            module_spec = importlib.util.spec_from_file_location(module_name, module_path)
+            module = importlib.util.module_from_spec(module_spec)
+            module_spec.loader.exec_module(module)
+        else:
+            # Standard import from the internal library path
+            module = importlib.import_module(f"kube_watch.modules.{module_name}")
+
+        return getattr(module, task_name)
+    except ImportError as e:
+        raise ImportError(f"Unable to import module '{module_name}': {e}")
+    except AttributeError as e:
+        raise AttributeError(f"The module '{module_name}' does not have a function named '{task_name}': {e}")
+    # finally:
+    #     if plugin_path:
+    #         # Remove the plugin path from sys.path after importing
+    #         sys.path.pop(0)  # Using pop(0) is safer in the context of insert(0, plugin_path)



 def resolve_parameter_value(param):
     if param.type == ParameterType.FROM_ENV:
         return os.getenv(param.value, '') # Default to empty string if env var is not set
+    if param.type == ParameterType.FROM_FLOW:
+        return runtime.flow_run.parameters.get(param.value, '')
     return param.value

 def prepare_task_inputs(parameters):
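
With `plugin_path` set, `get_task_function` loads `<plugin_path>/<module>.py` via `importlib.util` instead of importing from `kube_watch.modules`. A hedged sketch; the directory and names are invented for illustration:

# Assumes a file /opt/kube_watch_plugins/custom/cleanup.py that defines purge_temp_files().
func = get_task_function("custom.cleanup", "purge_temp_files", plugin_path="/opt/kube_watch_plugins")
result = func()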
@@ -123,4 +141,31 @@ def resolve_runner(runner):
     if runner == TaskRunners.RAY:
         raise ValueError("Ray Not Implemented")
         # return RayTaskRunner
-    raise ValueError("Invalid task runner type")
+    raise ValueError("Invalid task runner type")
+
+
+def filter_attributes(obj):
+    import uuid
+    from collections.abc import Iterable
+    import inspect
+
+    def is_simple(value):
+        """ Check if the value is a simple data type or a collection of simple data types """
+        if isinstance(value, (int, float, str, bool, type(None), uuid.UUID)):
+            return True
+        if isinstance(value, dict):
+            return all(is_simple(k) and is_simple(v) for k, v in value.items())
+        if isinstance(value, Iterable) and not isinstance(value, (str, bytes)):
+            return all(is_simple(item) for item in value)
+        return False
+
+    result = {}
+    for attr in dir(obj):
+        # Avoid magic methods and attributes
+        if attr.startswith("__") and attr.endswith("__"):
+            continue
+        value = getattr(obj, attr)
+        # Filter out methods and check if the attribute value is simple
+        if not callable(value) and not inspect.isclass(value) and is_simple(value):
+            result[attr] = value
+    return result
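
`filter_attributes` keeps only non-callable, non-class attributes whose values are simple (scalars, UUIDs, or containers of simple values). A minimal illustration with a throwaway class, not taken from the package's tests:

import uuid

class DummyRun:
    run_id = uuid.uuid4()
    name = "nightly-maintenance"
    retries = 3
    tags = ["kube", "watch"]

    def refresh(self):  # callable attributes are filtered out
        return None

print(filter_attributes(DummyRun()))
# Prints a dict containing name, retries, run_id, and tags; refresh is excluded.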
@@ -1,4 +1,4 @@
-from prefect import flow, get_run_logger
+from prefect import flow, get_run_logger, runtime
 import asyncio
 from typing import List
 import secrets
@@ -20,10 +20,14 @@ def create_flow_based_on_config(yaml_file, run_async=True):
     async def dynamic_workflow():
         logger = get_run_logger()
         tasks = {}
+
+        for param in workflow_config.parameters:
+            runtime.flow_run.parameters[param.name] = param.value
+
         logger.info(f"Starting flow: {flow_name}")
         for task_data in workflow_config.tasks:
             task_name = task_data.name
-            func = helpers.get_task_function(task_data.module, task_data.task)
+            func = helpers.get_task_function(task_data.module, task_data.task, task_data.plugin_path)
             task_inputs = helpers.prepare_task_inputs(task_data.inputs.parameters) if task_data.inputs else {}

             condition_result = True
@@ -38,17 +42,6 @@ def create_flow_based_on_config(yaml_file, run_async=True):
             task_future = helpers.submit_task(task_name, task_data, task_inputs, func)
             tasks[task_data.name] = task_future

-            # if task_data.dependency:
-            #     task_inputs = helpers.prepare_task_inputs_from_dep(task_data, task_inputs, tasks)
-
-            # if task_data.conditional:
-            #     condition_result = helpers.resolve_conditional(task_data, tasks)
-            #     if condition_result:
-            #         task_future = helpers.submit_task(task_name, task_data, task_inputs, func)
-            #         tasks[task_data.name] = task_future
-            #     else:
-            #         task_future = helpers.submit_task(task_name, task_data, task_inputs, func)
-            #         tasks[task_data.name] = task_future

         return tasks
     return dynamic_workflow
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: kubernetes-watch
-Version: 0.1.2
+Version: 0.1.4
 Summary:
 Author: bmotevalli
 Author-email: b.motevalli@gmail.com
@@ -9,14 +9,14 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
-Requires-Dist: GitPython (==3.1.43)
-Requires-Dist: PyYAML (==6.0.1)
-Requires-Dist: boto3 (==1.34.68)
-Requires-Dist: humps (==0.2.2)
-Requires-Dist: hvac (==2.1.0)
-Requires-Dist: kubernetes (==29.0.0)
-Requires-Dist: prefect (==2.18.0)
-Requires-Dist: requests (==2.32.3)
+Requires-Dist: GitPython (>=3.1.43,<4.0.0)
+Requires-Dist: PyYAML (>=6.0.1,<7.0.0)
+Requires-Dist: boto3 (>=1.34.68,<2.0.0)
+Requires-Dist: humps (>=0.2.2,<0.3.0)
+Requires-Dist: hvac (>=2.1.0,<3.0.0)
+Requires-Dist: kubernetes (>=29.0.0,<30.0.0)
+Requires-Dist: prefect (>=2.18.0,<3.0.0)
+Requires-Dist: requests (>=2.32.3,<3.0.0)
 Description-Content-Type: text/markdown

 # kube_watch
@@ -26,10 +26,16 @@ Description-Content-Type: text/markdown
 - `poetry install`
 - `poetry shell`

-# To install package to your environment:
+# To install the package to your environment locally:

 python setup.py install

+# To publish
+
+`poetry config pypi-token.pypi your-api-token`
+`poetry build`
+`poetry publish`
+

 # Description
 The kube_watch library is built on top of <a href='https://docs.prefect.io/latest/'>Prefect</a>. The library is designed to define workflows in a declarative and flexible fashion. Originally, workflows in Prefect are defined via decorators such as @flow and @task. In kube_watch, workflows can be defined in a declarative form via yaml files. The library is mainly focused on running scheduled workflows in a Kubernetes environment. However, it can easily be extended to be used for any purpose requiring a workflow. The workflow manifest has the following generic structure:
@@ -3,32 +3,31 @@ kube_watch/enums/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 kube_watch/enums/kube.py,sha256=z6ceQwHV9-LB1bMitaMeX8kBXt2GCVdVDFAwc6DQffo,94
 kube_watch/enums/logic.py,sha256=Dzh24fZpYahIt0YWpXe1_4FIoJNlwwgYOCnwEAjo8Uk,154
 kube_watch/enums/providers.py,sha256=nMX-hXqhgLJMFmC5nmMy8Ajnr7tiya3B5NWn57EMcxk,248
-kube_watch/enums/workflow.py,sha256=yMViHxigDfft7Qmsg3ou7x-vmMbw2IopbMkeNtHhbMI,321
+kube_watch/enums/workflow.py,sha256=W4EI98Kwh2Ptbzn4KgRMtTnulyKYxICRPgxo4g7wTjU,345
 kube_watch/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 kube_watch/models/common.py,sha256=mECTcFVLEitlN38l8tqP-BI1kwYev3Jxq0kDSH5pE0o,543
-kube_watch/models/workflow.py,sha256=S1c5cWAXJYnnRUHglE9JO61cfoWw4Q26o1wfZ2SKdsY,1452
+kube_watch/models/workflow.py,sha256=WE-ArxyaJfmze6gRvmwHYKQu7TpQBzxy88szqJO4Xxc,1694
 kube_watch/modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 kube_watch/modules/clusters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 kube_watch/modules/clusters/kube.py,sha256=Fje6-vUA1KQ4x8T6cUYJT_eVwUYw-dR71h95ixSLqLM,7767
-kube_watch/modules/logic/actions.py,sha256=Nf5-pzKobcd-CYNWhaz-j6wucAE6cb2nKqfNzaM26Tw,1223
+kube_watch/modules/logic/actions.py,sha256=lt7OkSw6m2ZQe2SfENUsjZeD8vdpvfoE4laEkv9veEA,2471
 kube_watch/modules/logic/checks.py,sha256=CFIMVURKJP5Y3mByyJkFCrJBlVUjTG2XixiwoRquXN4,157
 kube_watch/modules/logic/load.py,sha256=5DKi3nAFMcoNQL504JaDc-_REJSc6GyxeXLWQwNl0BM,227
 kube_watch/modules/logic/merge.py,sha256=vwc2TwcGU-vH5W0bFXzAzOMHt36ksdS4if1c4IbTeXs,926
 kube_watch/modules/logic/scheduler.py,sha256=-p5qh3FnEQ1jlkaY0Lrj9U-vau1b07NYAXBP6M09yoU,3517
 kube_watch/modules/logic/trasnform.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 kube_watch/modules/mock/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-kube_watch/modules/mock/mock_generator.py,sha256=V7u5e_j_XIzFnv4sqhd44wBPuJIjd8ZeH9znfqxtIyE,609
+kube_watch/modules/mock/mock_generator.py,sha256=j8UfcJeA9giEEyqH9Sf3RGtlMfGO13NbWMZ80dj4UtE,1315
 kube_watch/modules/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-kube_watch/modules/providers/aws.py,sha256=N3dmImU0_yIJJS0JrW8FqJ-66_jGip0QBfb7-4VDKTA,6606
+kube_watch/modules/providers/aws.py,sha256=yvxVwL7seuvxpGR2ZCrmWEMKh9hesWdPTC6LvW7Bi9E,8585
 kube_watch/modules/providers/git.py,sha256=h3rcn1FhU82nF52Ol9YHyFk4cvPxxaz_AxHnip8OXPY,1183
 kube_watch/modules/providers/github.py,sha256=WCpZIKHr4U0a4El1leXkaCv1jznf9ob5xHVeTNSpNG0,5338
-kube_watch/modules/providers/vault.py,sha256=eGQbjrVLlHzOPW8SqRWn6S6j8WsQBvvG7jOhzJKEh1o,3983
+kube_watch/modules/providers/vault.py,sha256=kw-S4orCIrVgDKzctzYeICIVD3-A9cT_CyyFAwi4oPM,6215
 kube_watch/standalone/metarecogen/ckan_to_gn.py,sha256=FBiv6McWh4hqV6Bz08zGLzEIe4v1-D3FawjBKYbV7Ms,4767
-kube_watch/standalone/metarecogen/Dockerfile,sha256=5EBb8oUnBwlRcLT4Lw2E659ONj4LYk_HFF4lZfjB2DM,357
 kube_watch/watch/__init__.py,sha256=6Ay9P_Ws7rP7ZaIrFRZtp_1uwVK4ZDmkkNhFyqPNQIU,61
-kube_watch/watch/helpers.py,sha256=MlsPuUHIYfKHB8GZf2yEz779hQwen_s3xBOp9gQu6oI,4377
-kube_watch/watch/workflow.py,sha256=7AODfNDx9oc1lAuPfF4muATs-6eTZZk6WfD5yN20tGo,4688
-kubernetes_watch-0.1.2.dist-info/LICENSE,sha256=StyinJRmy--Pc2vQbRToZSN4sjSVg3zccMFrktVcrEw,1096
-kubernetes_watch-0.1.2.dist-info/METADATA,sha256=r3b4BplzbwQTuN1iSEbl26mTHoaxDhfFSd6GinbQt-w,4810
-kubernetes_watch-0.1.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-kubernetes_watch-0.1.2.dist-info/RECORD,,
+kube_watch/watch/helpers.py,sha256=T0xDSCfrW7NrmQzgIzOiojQzu_HesajMb7S_AX-tt98,6431
+kube_watch/watch/workflow.py,sha256=h0b_P_kfiPxqTFHZ6o2HkDkNaUBOwv1DKJnwEMMVXaI,4203
+kubernetes_watch-0.1.4.dist-info/LICENSE,sha256=StyinJRmy--Pc2vQbRToZSN4sjSVg3zccMFrktVcrEw,1096
+kubernetes_watch-0.1.4.dist-info/METADATA,sha256=n9GU5CEsh33BEGgYDApxctg_Rl0Oav6RfeYwVMM-fzs,4969
+kubernetes_watch-0.1.4.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+kubernetes_watch-0.1.4.dist-info/RECORD,,
@@ -1,16 +0,0 @@
-FROM python:3.11-slim
-# FROM python:3
-# Simple Dockerfile used to run 'ckan_to_gn.py' script
-
-# Install pip and requests library
-RUN pip install --no-cache-dir --upgrade pip \
-    && pip install --no-cache-dir requests
-
-# Set work dir
-WORKDIR /usr/app/src
-
-# Copy python script
-COPY ckan_to_gn.py ./
-
-# Run it
-CMD ["python3", "ckan_to_gn.py"]