kubernetes-watch 0.1.5__py3-none-any.whl → 0.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kube_watch/enums/kube.py +5 -5
- kube_watch/enums/logic.py +8 -8
- kube_watch/enums/providers.py +12 -12
- kube_watch/enums/workflow.py +17 -17
- kube_watch/models/common.py +16 -16
- kube_watch/models/workflow.py +60 -60
- kube_watch/modules/clusters/kube.py +185 -185
- kube_watch/modules/database/__init__.py +0 -0
- kube_watch/modules/database/model.py +12 -0
- kube_watch/modules/database/postgre.py +271 -0
- kube_watch/modules/logic/actions.py +55 -55
- kube_watch/modules/logic/checks.py +7 -7
- kube_watch/modules/logic/load.py +23 -23
- kube_watch/modules/logic/merge.py +31 -31
- kube_watch/modules/logic/scheduler.py +74 -74
- kube_watch/modules/mock/mock_generator.py +53 -53
- kube_watch/modules/providers/aws.py +210 -210
- kube_watch/modules/providers/git.py +120 -32
- kube_watch/modules/providers/github.py +126 -126
- kube_watch/modules/providers/vault.py +188 -188
- kube_watch/standalone/metarecogen/ckan_to_gn.py +132 -132
- kube_watch/watch/__init__.py +1 -1
- kube_watch/watch/helpers.py +170 -170
- kube_watch/watch/workflow.py +232 -100
- {kubernetes_watch-0.1.5.dist-info → kubernetes_watch-0.1.9.dist-info}/LICENSE +21 -21
- {kubernetes_watch-0.1.5.dist-info → kubernetes_watch-0.1.9.dist-info}/METADATA +5 -3
- kubernetes_watch-0.1.9.dist-info/RECORD +36 -0
- kubernetes_watch-0.1.5.dist-info/RECORD +0 -33
- {kubernetes_watch-0.1.5.dist-info → kubernetes_watch-0.1.9.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,271 @@
|
|
|
1
|
+
import psycopg2
|
|
2
|
+
import psycopg2.extras
|
|
3
|
+
from prefect import get_run_logger
|
|
4
|
+
|
|
5
|
+
from .model import TableQuery
|
|
6
|
+
|
|
7
|
+
logger = get_run_logger()
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def execute_query(db_user, db_pass, db_query, db_host="localhost", db_port=5432, db_name="postgres"):
    """
    Connect to a PostgreSQL database, execute a single query, and return a status dict.

    Args:
        db_user (str): Database username
        db_pass (str): Database password
        db_query (str): SQL query to execute
        db_host (str): Database host (default: localhost)
        db_port (int): Database port (default: 5432)
        db_name (str): Database name (default: postgres)

    Returns:
        dict: On success, ``{"status": "success", ...}`` carrying either the
        fetched rows (``row_count``/``data``) when the statement produced a
        result set, or ``affected_rows`` for DML/DDL statements. On failure,
        ``{"status": "error", "message": ..., "error_type": ...}``.
    """
    connection = None
    cursor = None

    try:
        # Establish database connection
        connection = psycopg2.connect(
            host=db_host,
            port=db_port,
            database=db_name,
            user=db_user,
            password=db_pass
        )

        # RealDictCursor makes each fetched row a dict keyed by column name.
        cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor)

        # Execute the query
        cursor.execute(db_query)

        # cursor.description is set whenever the statement produced a result
        # set. This is more reliable than string-matching "SELECT": it also
        # covers CTEs ("WITH ..."), "RETURNING" clauses, and queries that
        # begin with comments or whitespace.
        if cursor.description is not None:
            results = cursor.fetchall()
            row_count = len(results)
            connection.commit()  # Commit to close the implicit transaction

            logger.info(f"Query executed successfully. Retrieved {row_count} rows.")
            return {
                "status": "success",
                "message": f"Query executed successfully. Retrieved {row_count} rows.",
                "row_count": row_count,
                "data": results
            }
        else:
            # Statements with no result set: INSERT, UPDATE, DELETE, DDL.
            connection.commit()
            affected_rows = cursor.rowcount

            logger.info(f"Query executed successfully. {affected_rows} rows affected.")
            return {
                "status": "success",
                "message": f"Query executed successfully. {affected_rows} rows affected.",
                "affected_rows": affected_rows
            }

    except psycopg2.Error as e:
        # PostgreSQL specific errors
        error_msg = f"PostgreSQL error: {str(e)}"
        logger.error(error_msg)

        if connection:
            connection.rollback()

        return {
            "status": "error",
            "message": error_msg,
            "error_type": "postgresql_error"
        }

    except Exception as e:
        # General errors
        error_msg = f"Unexpected error: {str(e)}"
        logger.error(error_msg)

        if connection:
            connection.rollback()

        return {
            "status": "error",
            "message": error_msg,
            "error_type": "general_error"
        }

    finally:
        # Clean up connections
        try:
            if cursor:
                cursor.close()
                logger.debug("Database cursor closed.")

            if connection:
                connection.close()
                logger.debug("Database connection closed.")

        except Exception as cleanup_error:
            logger.warning(f"Error during cleanup: {str(cleanup_error)}")
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def delete_on_retention_period(table_delete: dict, batch_size: int = 100000, interval_days: int = 14):
    """
    Delete rows older than a specified retention period from a table in PostgreSQL.

    Rows are removed in batches of ``batch_size``, each batch committed
    separately, looping until a batch deletes zero rows.

    Args:
        table_delete (dict): Keyword arguments for the TableQuery model
            (table/column identifiers plus connection settings).
        batch_size (int): Number of rows to delete in each batch (default: 100000).
        interval_days (int): Retention period in days (default: 14).

    Returns:
        dict: Status message with success/failure information.

    Raises:
        ValueError: If ``table_delete`` cannot be parsed into a TableQuery.
    """

    try:
        table_query = TableQuery(**table_delete)
    except Exception as e:
        logger.error(f"Error creating TableQuery object: {str(e)}")
        # NOTE(review): this message lists a 'name' key, but the code below
        # reads `table_query.table_name` — confirm against the TableQuery model.
        raise ValueError("Invalid table_delete data format. Expected a dictionary with 'name', 'column_name', 'db_host', 'db_port', 'db_name', 'db_user', and 'db_pass' keys.")

    connection = None
    cursor = None

    try:
        # Establish database connection
        connection = psycopg2.connect(
            host=table_query.db_host,
            port=table_query.db_port,
            database=table_query.db_name,
            user=table_query.db_user,
            password=table_query.db_pass
        )

        cursor = connection.cursor()

        rows_deleted_total = 0

        while True:
            # Build the DELETE query dynamically.
            # NOTE(review): table/column names and the numeric parameters are
            # interpolated directly into the SQL string — safe only if the
            # TableQuery values come from trusted configuration; verify they
            # are never user-supplied. Also assumes the target table has an
            # `id` column usable for batching — TODO confirm.
            delete_query = f"""
            DELETE FROM {table_query.table_name}
            WHERE id IN (
                SELECT id FROM {table_query.table_name}
                WHERE {table_query.column_name} < NOW() - INTERVAL '{interval_days} days'
                LIMIT {batch_size}
            )
            """

            cursor.execute(delete_query)
            rows_deleted = cursor.rowcount
            rows_deleted_total += rows_deleted

            # Commit each batch so locks and transaction size stay bounded.
            connection.commit()

            logger.info(f"Deleted {rows_deleted} rows from {table_query.table_name}.")

            # Exit the loop if no rows were deleted
            if rows_deleted == 0:
                break

        logger.info(f"Total rows deleted from {table_query.table_name}: {rows_deleted_total}.")

        return {
            "status": "success",
            "message": f"Total rows deleted from {table_query.table_name}: {rows_deleted_total}.",
            "rows_deleted_total": rows_deleted_total
        }

    except psycopg2.Error as e:
        error_msg = f"PostgreSQL error: {str(e)}"
        logger.error(error_msg)

        if connection:
            connection.rollback()

        return {
            "status": "error",
            "message": error_msg,
            "error_type": "postgresql_error"
        }

    except Exception as e:
        error_msg = f"Unexpected error: {str(e)}"
        logger.error(error_msg)

        if connection:
            connection.rollback()

        return {
            "status": "error",
            "message": error_msg,
            "error_type": "general_error"
        }

    finally:
        # Clean up connections
        try:
            if cursor:
                cursor.close()
                logger.debug("Database cursor closed.")

            if connection:
                connection.close()
                logger.debug("Database connection closed.")

        except Exception as cleanup_error:
            logger.warning(f"Error during cleanup: {str(cleanup_error)}")
|
|
217
|
+
|
|
218
|
+
"""
|
|
219
|
+
EXAMPLE RETENTION PERIOD DELETION QUERY
|
|
220
|
+
|
|
221
|
+
DO $$
|
|
222
|
+
DECLARE
|
|
223
|
+
batch_size INTEGER := 100000;
|
|
224
|
+
rows_deleted INTEGER;
|
|
225
|
+
t TEXT;
|
|
226
|
+
col TEXT;
|
|
227
|
+
sql TEXT;
|
|
228
|
+
count_sql TEXT;
|
|
229
|
+
|
|
230
|
+
-- Define loop record
|
|
231
|
+
rec RECORD;
|
|
232
|
+
BEGIN
|
|
233
|
+
-- Simulate table/column pairs using VALUES
|
|
234
|
+
FOR rec IN
|
|
235
|
+
SELECT * FROM (
|
|
236
|
+
VALUES
|
|
237
|
+
('log', 'created'),
|
|
238
|
+
('task_run_state', 'timestamp'),
|
|
239
|
+
('task_run', 'start_time'),
|
|
240
|
+
('flow_run_state', 'timestamp'),
|
|
241
|
+
('flow_run', 'start_time')
|
|
242
|
+
) AS table_info(table_name, column_name)
|
|
243
|
+
LOOP
|
|
244
|
+
t := rec.table_name;
|
|
245
|
+
col := rec.column_name;
|
|
246
|
+
|
|
247
|
+
LOOP
|
|
248
|
+
sql := format(
|
|
249
|
+
'DELETE FROM %I WHERE id IN (
|
|
250
|
+
SELECT id FROM %I WHERE %I < NOW() - INTERVAL ''14 days'' LIMIT %s
|
|
251
|
+
)',
|
|
252
|
+
t, t, col, batch_size
|
|
253
|
+
);
|
|
254
|
+
|
|
255
|
+
EXECUTE sql;
|
|
256
|
+
GET DIAGNOSTICS rows_deleted = ROW_COUNT;
|
|
257
|
+
|
|
258
|
+
RAISE NOTICE 'Deleted % rows from %', rows_deleted, t;
|
|
259
|
+
EXIT WHEN rows_deleted = 0;
|
|
260
|
+
END LOOP;
|
|
261
|
+
|
|
262
|
+
count_sql := format(
|
|
263
|
+
'SELECT COUNT(*) FROM %I WHERE %I < NOW() - INTERVAL ''14 days''',
|
|
264
|
+
t, col
|
|
265
|
+
);
|
|
266
|
+
|
|
267
|
+
EXECUTE count_sql INTO rows_deleted;
|
|
268
|
+
RAISE NOTICE 'Remaining rows in % older than 14 days: %', t, rows_deleted;
|
|
269
|
+
END LOOP;
|
|
270
|
+
END $$;
|
|
271
|
+
"""
|
|
@@ -1,56 +1,56 @@
|
|
|
1
|
-
import subprocess
|
|
2
|
-
import os
|
|
3
|
-
from typing import List
|
|
4
|
-
from prefect import get_run_logger
|
|
5
|
-
logger = get_run_logger()
|
|
6
|
-
|
|
7
|
-
def run_standalone_script(package_name, package_run, package_exec):
|
|
8
|
-
script_dir = os.path.dirname(os.path.realpath(__file__))
|
|
9
|
-
# script_path = os.path.join(script_dir, package_name.replace('.', os.sep))
|
|
10
|
-
target_dir = os.path.join(script_dir, os.pardir, os.pardir, *package_name.split('.'))
|
|
11
|
-
|
|
12
|
-
full_command = f"{package_run} {os.path.join(target_dir, package_exec)}"
|
|
13
|
-
|
|
14
|
-
# Execute the command
|
|
15
|
-
try:
|
|
16
|
-
result = subprocess.run(full_command, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
|
|
17
|
-
if result.stdout:
|
|
18
|
-
logger.info(result.stdout)
|
|
19
|
-
if result.stderr:
|
|
20
|
-
logger.error(result.stderr)
|
|
21
|
-
# logger.info(f"Output: {result.stdout}")
|
|
22
|
-
result.check_returncode()
|
|
23
|
-
except subprocess.CalledProcessError as e:
|
|
24
|
-
# All logs should have already been handled above, now just raise an exception
|
|
25
|
-
logger.error("The subprocess encountered an error: %s", e)
|
|
26
|
-
raise Exception("Subprocess failed with exit code {}".format(e.returncode))
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
def run_standalone_script_modified(base_path: str, package_name: str, package_run_cmds: List[str]):
|
|
30
|
-
# Construct the absolute path to the target directory
|
|
31
|
-
target_dir = os.path.join(base_path, *package_name.split('.'))
|
|
32
|
-
|
|
33
|
-
commands = [f"cd {target_dir}"] + package_run_cmds
|
|
34
|
-
full_command = " && ".join(commands)
|
|
35
|
-
|
|
36
|
-
# full_command = f"cd {target_dir} && {package_run_cmd}"
|
|
37
|
-
|
|
38
|
-
# Build the full command to execute
|
|
39
|
-
# full_command = f"{package_run} {os.path.join(target_dir, package_exec)}"
|
|
40
|
-
|
|
41
|
-
# print(full_command)
|
|
42
|
-
|
|
43
|
-
# Execute the command
|
|
44
|
-
try:
|
|
45
|
-
result = subprocess.run(full_command, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
|
|
46
|
-
if result.stdout:
|
|
47
|
-
logger.info(result.stdout)
|
|
48
|
-
if result.stderr:
|
|
49
|
-
logger.error(result.stderr)
|
|
50
|
-
result.check_returncode()
|
|
51
|
-
except subprocess.CalledProcessError as e:
|
|
52
|
-
logger.error("Command failed with exit code %s", e.returncode)
|
|
53
|
-
logger.error("Output:\n%s", e.stdout)
|
|
54
|
-
logger.error("Errors:\n%s", e.stderr)
|
|
55
|
-
raise Exception(f"Subprocess failed with exit code {e.returncode}. Check logs for more details.")
|
|
1
|
+
import subprocess
|
|
2
|
+
import os
|
|
3
|
+
from typing import List
|
|
4
|
+
from prefect import get_run_logger
|
|
5
|
+
logger = get_run_logger()
|
|
6
|
+
|
|
7
|
+
def run_standalone_script(package_name, package_run, package_exec):
    """
    Run a standalone script located relative to this module.

    Args:
        package_name (str): Dotted package path identifying the script's
            directory, resolved two levels above this file.
        package_run (str): Command/interpreter used to run the script
            (e.g. "python").
        package_exec (str): Script filename to execute.

    Raises:
        Exception: If the subprocess exits with a non-zero status.
    """
    script_dir = os.path.dirname(os.path.realpath(__file__))
    target_dir = os.path.join(script_dir, os.pardir, os.pardir, *package_name.split('.'))

    full_command = f"{package_run} {os.path.join(target_dir, package_exec)}"

    # Run WITHOUT check=True: the previous version raised CalledProcessError
    # before the stdout/stderr logging below could execute, so process output
    # was lost on failure (and `result.check_returncode()` was dead code).
    result = subprocess.run(full_command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
    if result.stdout:
        logger.info(result.stdout)
    if result.stderr:
        logger.error(result.stderr)
    if result.returncode != 0:
        logger.error("The subprocess encountered an error: exit code %s", result.returncode)
        raise Exception("Subprocess failed with exit code {}".format(result.returncode))
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def run_standalone_script_modified(base_path: str, package_name: str, package_run_cmds: List[str]):
    """
    Run a sequence of shell commands inside a package directory.

    Args:
        base_path: Absolute base directory containing the package tree.
        package_name: Dotted package path resolved to a sub-directory of
            ``base_path``.
        package_run_cmds: Shell commands executed (joined with '&&') after
            cd-ing into the target directory.

    Raises:
        Exception: If any command exits with a non-zero status.
    """
    # Construct the absolute path to the target directory
    target_dir = os.path.join(base_path, *package_name.split('.'))

    commands = [f"cd {target_dir}"] + package_run_cmds
    full_command = " && ".join(commands)

    # Execute the command
    try:
        result = subprocess.run(full_command, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        if result.stdout:
            logger.info(result.stdout)
        if result.stderr:
            logger.error(result.stderr)
        # check=True already raised for a non-zero exit code, so no further
        # return-code check is needed here (the old result.check_returncode()
        # was dead code).
    except subprocess.CalledProcessError as e:
        logger.error("Command failed with exit code %s", e.returncode)
        logger.error("Output:\n%s", e.stdout)
        logger.error("Errors:\n%s", e.stderr)
        raise Exception(f"Subprocess failed with exit code {e.returncode}. Check logs for more details.")
|
|
56
56
|
# raise Exception(f"Subprocess failed with exit code {e.returncode}")
|
|
@@ -1,8 +1,8 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
def dicts_has_diff(dict_a, dict_b):
|
|
4
|
-
return dict_a != dict_b
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
def remove_keys(d, keys):
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
def dicts_has_diff(dict_a, dict_b):
    """Return True when the two dictionaries differ, False when equal."""
    return not (dict_a == dict_b)
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def remove_keys(d, keys):
    """Return a shallow copy of *d* with every key listed in *keys* dropped."""
    result = dict(d)
    for key in keys:
        result.pop(key, None)
    return result
|
kube_watch/modules/logic/load.py
CHANGED
|
@@ -1,23 +1,23 @@
|
|
|
1
|
-
import os
|
|
2
|
-
from prefect import get_run_logger
|
|
3
|
-
logger = get_run_logger()
|
|
4
|
-
|
|
5
|
-
def load_secrets_to_env(data):
|
|
6
|
-
for key, value in data.items():
|
|
7
|
-
if key in os.environ:
|
|
8
|
-
del os.environ[key]
|
|
9
|
-
os.environ[key] = value
|
|
10
|
-
# logger.info(f"ENV VAR: {key} loaded")
|
|
11
|
-
|
|
12
|
-
def load_env_from_file(filepath):
|
|
13
|
-
with open(filepath, "r") as f:
|
|
14
|
-
for line in f:
|
|
15
|
-
# Remove whitespace and ignore comments
|
|
16
|
-
line = line.strip()
|
|
17
|
-
if line and not line.startswith('#'):
|
|
18
|
-
key, value = line.split('=', 1)
|
|
19
|
-
# Remove the environment variable if it already exists
|
|
20
|
-
if key in os.environ:
|
|
21
|
-
del os.environ[key]
|
|
22
|
-
# Set the new value
|
|
23
|
-
os.environ[key] = value
|
|
1
|
+
import os
|
|
2
|
+
from prefect import get_run_logger
|
|
3
|
+
logger = get_run_logger()
|
|
4
|
+
|
|
5
|
+
def load_secrets_to_env(data):
    """
    Load key/value pairs into the process environment.

    Existing variables with the same name are overwritten.

    Args:
        data (dict): Mapping of environment variable names to string values.
    """
    for key, value in data.items():
        # Plain assignment overwrites any existing variable; the previous
        # delete-then-set sequence was redundant.
        os.environ[key] = value
|
|
11
|
+
|
|
12
|
+
def load_env_from_file(filepath):
    """
    Load environment variables from a .env-style file.

    Each non-empty, non-comment line must be ``KEY=VALUE``; the first '='
    splits key from value. Whitespace around the key and value is stripped.
    Existing variables are overwritten.

    Args:
        filepath (str): Path of the file to read.

    Raises:
        ValueError: If a non-comment line contains no '='.
    """
    with open(filepath, "r") as f:
        for line in f:
            # Remove whitespace and ignore comments
            line = line.strip()
            if line and not line.startswith('#'):
                key, value = line.split('=', 1)
                # Strip stray whitespace so "KEY = value" does not create an
                # environment variable whose NAME has a trailing space.
                os.environ[key.strip()] = value.strip()
|
|
@@ -1,31 +1,31 @@
|
|
|
1
|
-
from typing import Any, List, Dict
|
|
2
|
-
from kube_watch.enums.logic import Operations
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
def merge_logical_outputs(inp_dict: Dict):
|
|
6
|
-
if 'operation' not in inp_dict.keys():
|
|
7
|
-
raise TypeError("Missing required parameters: 'operation'")
|
|
8
|
-
operation = inp_dict.get('operation')
|
|
9
|
-
del inp_dict['operation']
|
|
10
|
-
|
|
11
|
-
inputs = [v for k,v in inp_dict.items()]
|
|
12
|
-
return merge_logical_list(inputs, operation)
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
def merge_logical_list(inp_list: List, operation: Operations):
|
|
16
|
-
if operation == Operations.OR:
|
|
17
|
-
return any(inp_list)
|
|
18
|
-
if operation == Operations.AND:
|
|
19
|
-
return all(inp_list)
|
|
20
|
-
raise ValueError("Invalid logical operation")
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
def partial_dict_update(orig_data, new_data):
|
|
24
|
-
"""
|
|
25
|
-
This function is used when some key value pairs in orig_data should
|
|
26
|
-
be updated from new_data.
|
|
27
|
-
"""
|
|
28
|
-
for k, v in new_data.items():
|
|
29
|
-
orig_data[k] = v
|
|
30
|
-
|
|
31
|
-
return orig_data
|
|
1
|
+
from typing import Any, List, Dict
|
|
2
|
+
from kube_watch.enums.logic import Operations
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def merge_logical_outputs(inp_dict: Dict):
    """
    Reduce the boolean values of *inp_dict* with the logical operation
    named by its 'operation' entry.

    Args:
        inp_dict: Mapping that must contain an 'operation' key (an
            Operations member); every other value is treated as a boolean
            operand.

    Returns:
        bool: The OR/AND combination of the operand values.

    Raises:
        TypeError: If 'operation' is missing.
    """
    if 'operation' not in inp_dict:
        raise TypeError("Missing required parameters: 'operation'")
    operation = inp_dict['operation']

    # Build the operand list WITHOUT mutating the caller's dict (the old
    # implementation deleted the 'operation' key in place).
    inputs = [v for k, v in inp_dict.items() if k != 'operation']
    return merge_logical_list(inputs, operation)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def merge_logical_list(inp_list: List, operation: Operations):
    """
    Combine a list of boolean values with the given logical operation.

    Raises:
        ValueError: If *operation* is neither Operations.OR nor Operations.AND.
    """
    if operation == Operations.AND:
        return all(inp_list)
    if operation == Operations.OR:
        return any(inp_list)
    raise ValueError("Invalid logical operation")
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def partial_dict_update(orig_data, new_data):
    """
    Update *orig_data* in place with every key/value pair from *new_data*.

    Returns the mutated *orig_data* for convenience.
    """
    orig_data.update(new_data)
    return orig_data
|
|
@@ -1,74 +1,74 @@
|
|
|
1
|
-
from datetime import datetime
|
|
2
|
-
from enum import Enum
|
|
3
|
-
|
|
4
|
-
from prefect import get_run_logger
|
|
5
|
-
logger = get_run_logger()
|
|
6
|
-
|
|
7
|
-
class IntervalType(Enum):
|
|
8
|
-
MINUTES = 'minutes'
|
|
9
|
-
HOURLY = 'hourly'
|
|
10
|
-
DAILY = 'daily'
|
|
11
|
-
WEEKLY = 'weekly'
|
|
12
|
-
MONTHLY = 'monthly'
|
|
13
|
-
QUARTERLY = 'quarterly'
|
|
14
|
-
SEMIANNUAL = 'semiannual'
|
|
15
|
-
YEARLY = 'yearly'
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
def should_run_task(interval_type, interval_value=None, interval_buffer=10, specific_day=None):
|
|
19
|
-
"""
|
|
20
|
-
The function `should_run_task` determines whether a task should run based on the specified interval
|
|
21
|
-
type and values.
|
|
22
|
-
|
|
23
|
-
:param interval_type: The `interval_type` parameter specifies the type of interval at which a task
|
|
24
|
-
should run. It can take on the following values:
|
|
25
|
-
:param interval_value: The `interval_value` parameter represents the specific value associated with
|
|
26
|
-
the interval type. For example, if the interval type is `DAILY`, the `interval_value` would
|
|
27
|
-
represent the specific hour at which the task should run daily. Similarly, for `WEEKLY`, it would
|
|
28
|
-
represent the specific day
|
|
29
|
-
:param interval_buffer: The `interval_buffer` parameter in the `should_run_task` function is a
|
|
30
|
-
default value set to 20 minutes. This provides an acceptable range for a task to get executed. Suitable
|
|
31
|
-
for Daily, Weekly, Monthly, etc. schedules.
|
|
32
|
-
:param specific_day: The `specific_day` parameter represents the day of the week when the task
|
|
33
|
-
should run in the case of a weekly interval. The values for `specific_day` are as follows:
|
|
34
|
-
:return: The function `should_run_task` takes in various parameters related to different interval
|
|
35
|
-
types (such as minutes, hourly, daily, weekly, monthly, quarterly, semiannual, yearly) and checks if
|
|
36
|
-
the current datetime matches the specified interval criteria.
|
|
37
|
-
"""
|
|
38
|
-
|
|
39
|
-
now = datetime.now()
|
|
40
|
-
# Match the interval type
|
|
41
|
-
if interval_type == IntervalType.MINUTES.value:
|
|
42
|
-
# Runs every 'interval_value' minutes
|
|
43
|
-
return now.minute % interval_value == 0
|
|
44
|
-
|
|
45
|
-
if interval_type == IntervalType.HOURLY.value:
|
|
46
|
-
# Runs every 'interval_value' hours on the hour
|
|
47
|
-
return now.hour % interval_value == 0 and now.minute < interval_buffer
|
|
48
|
-
|
|
49
|
-
if interval_type == IntervalType.DAILY.value:
|
|
50
|
-
# Runs once a day at 'interval_value' hour
|
|
51
|
-
return now.hour == interval_value and now.minute < interval_buffer
|
|
52
|
-
|
|
53
|
-
if interval_type == IntervalType.WEEKLY.value:
|
|
54
|
-
# Runs once a week on 'specific_day' (0=Monday, 6=Sunday)
|
|
55
|
-
return now.weekday() == specific_day and now.hour == 0 and now.minute < interval_buffer
|
|
56
|
-
|
|
57
|
-
if interval_type == IntervalType.MONTHLY.value:
|
|
58
|
-
# Runs on the 'interval_value' day of each month
|
|
59
|
-
return now.day == interval_value and now.hour == 0 and now.minute < interval_buffer
|
|
60
|
-
|
|
61
|
-
if interval_type == IntervalType.QUARTERLY.value:
|
|
62
|
-
# Runs on the first day of each quarter
|
|
63
|
-
return now.month % 3 == 1 and now.day == 1 and now.hour == 0 and now.minute < interval_buffer
|
|
64
|
-
|
|
65
|
-
if interval_type == IntervalType.SEMIANNUAL.value:
|
|
66
|
-
# Runs on the first day of the 1st and 7th month
|
|
67
|
-
return (now.month == 1 or now.month == 7) and now.day == 1 and now.hour == 0 and now.minute < interval_buffer
|
|
68
|
-
|
|
69
|
-
if interval_type == IntervalType.YEARLY.value:
|
|
70
|
-
# Runs on the first day of the year
|
|
71
|
-
return now.month == 1 and now.day == 1 and now.hour == 0 and now.minute < interval_buffer
|
|
72
|
-
|
|
73
|
-
return False
|
|
74
|
-
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from enum import Enum
|
|
3
|
+
|
|
4
|
+
from prefect import get_run_logger
|
|
5
|
+
logger = get_run_logger()
|
|
6
|
+
|
|
7
|
+
class IntervalType(Enum):
    # Scheduling intervals recognised by should_run_task below; the string
    # values are what callers pass as `interval_type`.
    MINUTES = 'minutes'        # every `interval_value` minutes
    HOURLY = 'hourly'          # every `interval_value` hours, on the hour
    DAILY = 'daily'            # once a day at hour `interval_value`
    WEEKLY = 'weekly'          # once a week on `specific_day` (0=Mon, 6=Sun)
    MONTHLY = 'monthly'        # on day `interval_value` of each month
    QUARTERLY = 'quarterly'    # first day of each quarter
    SEMIANNUAL = 'semiannual'  # first day of January and July
    YEARLY = 'yearly'          # first day of the year
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def should_run_task(interval_type, interval_value=None, interval_buffer=10, specific_day=None):
    """
    Determine whether a task is due to run now, based on the specified
    interval type and values.

    :param interval_type: One of the IntervalType string values ('minutes',
        'hourly', 'daily', 'weekly', 'monthly', 'quarterly', 'semiannual',
        'yearly').
    :param interval_value: Value associated with the interval type: the step
        for MINUTES/HOURLY (used as a modulus — must be non-zero for those
        types or a ZeroDivisionError is raised), the hour of day for DAILY,
        the day of month for MONTHLY.
    :param interval_buffer: Acceptance window in minutes (default: 10). For
        hourly-and-coarser schedules the task is considered due while
        `now.minute` is below this buffer.
    :param specific_day: Day of week for WEEKLY schedules (0=Monday,
        6=Sunday).
    :return: True if the current time matches the interval criteria,
        otherwise False (including for unrecognised interval types).
    """

    # NOTE(review): uses naive local time — confirm the scheduler host's
    # timezone matches what callers expect.
    now = datetime.now()
    # Match the interval type
    if interval_type == IntervalType.MINUTES.value:
        # Runs every 'interval_value' minutes
        return now.minute % interval_value == 0

    if interval_type == IntervalType.HOURLY.value:
        # Runs every 'interval_value' hours on the hour
        return now.hour % interval_value == 0 and now.minute < interval_buffer

    if interval_type == IntervalType.DAILY.value:
        # Runs once a day at 'interval_value' hour
        return now.hour == interval_value and now.minute < interval_buffer

    if interval_type == IntervalType.WEEKLY.value:
        # Runs once a week on 'specific_day' (0=Monday, 6=Sunday)
        return now.weekday() == specific_day and now.hour == 0 and now.minute < interval_buffer

    if interval_type == IntervalType.MONTHLY.value:
        # Runs on the 'interval_value' day of each month
        return now.day == interval_value and now.hour == 0 and now.minute < interval_buffer

    if interval_type == IntervalType.QUARTERLY.value:
        # Runs on the first day of each quarter
        return now.month % 3 == 1 and now.day == 1 and now.hour == 0 and now.minute < interval_buffer

    if interval_type == IntervalType.SEMIANNUAL.value:
        # Runs on the first day of the 1st and 7th month
        return (now.month == 1 or now.month == 7) and now.day == 1 and now.hour == 0 and now.minute < interval_buffer

    if interval_type == IntervalType.YEARLY.value:
        # Runs on the first day of the year
        return now.month == 1 and now.day == 1 and now.hour == 0 and now.minute < interval_buffer

    return False
|
|
74
|
+
|