kubernetes-watch 0.1.5__py3-none-any.whl → 0.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kube_watch/enums/kube.py +5 -5
- kube_watch/enums/logic.py +8 -8
- kube_watch/enums/providers.py +12 -12
- kube_watch/enums/workflow.py +17 -17
- kube_watch/models/common.py +16 -16
- kube_watch/models/workflow.py +60 -60
- kube_watch/modules/clusters/kube.py +185 -185
- kube_watch/modules/database/__init__.py +0 -0
- kube_watch/modules/database/model.py +12 -0
- kube_watch/modules/database/postgre.py +271 -0
- kube_watch/modules/logic/actions.py +55 -55
- kube_watch/modules/logic/checks.py +7 -7
- kube_watch/modules/logic/load.py +23 -23
- kube_watch/modules/logic/merge.py +31 -31
- kube_watch/modules/logic/scheduler.py +74 -74
- kube_watch/modules/mock/mock_generator.py +53 -53
- kube_watch/modules/providers/aws.py +210 -210
- kube_watch/modules/providers/git.py +120 -32
- kube_watch/modules/providers/github.py +126 -126
- kube_watch/modules/providers/vault.py +188 -188
- kube_watch/standalone/metarecogen/ckan_to_gn.py +132 -132
- kube_watch/watch/__init__.py +1 -1
- kube_watch/watch/helpers.py +170 -170
- kube_watch/watch/workflow.py +232 -100
- {kubernetes_watch-0.1.5.dist-info → kubernetes_watch-0.1.9.dist-info}/LICENSE +21 -21
- {kubernetes_watch-0.1.5.dist-info → kubernetes_watch-0.1.9.dist-info}/METADATA +5 -3
- kubernetes_watch-0.1.9.dist-info/RECORD +36 -0
- kubernetes_watch-0.1.5.dist-info/RECORD +0 -33
- {kubernetes_watch-0.1.5.dist-info → kubernetes_watch-0.1.9.dist-info}/WHEEL +0 -0
|
@@ -1,33 +1,121 @@
|
|
|
1
|
-
import os
|
|
2
|
-
from git import Repo
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
import os
from git import Repo
import shutil
import subprocess
import tempfile
from pathlib import Path

from prefect import get_run_logger

# NOTE(review): get_run_logger() raises MissingContextError when called outside
# a Prefect flow/task run, so importing this module outside a run context will
# fail at import time — confirm this module is only imported from within flows.
logger = get_run_logger()
def clone_ssh_repo(
    git_url: str,
    clone_base_path: str,
    repo_dir_name: str = "manifest-repo",
    depth: int = 1,
    ssh_key_env: str = "GIT_SSH_PRIVATE_KEY",
    known_hosts_env: str = "GIT_SSH_KNOWN_HOSTS",
) -> Path:
    """
    Clone or update a Git repo using an SSH private key & known_hosts provided via env vars.

    Env vars:
      - GIT_SSH_PRIVATE_KEY: the full private key (RSA/ED25519) including BEGIN/END lines
      - GIT_SSH_KNOWN_HOSTS: one or more known_hosts lines (from `ssh-keyscan github.com`)

    Args:
        git_url: SSH URL like 'git@github.com:your-org/your-repo.git'
        clone_base_path: directory where repo_dir_name will be created
        repo_dir_name: folder name for the clone
        depth: shallow clone depth (1 == latest); <= 0 disables shallow cloning
        ssh_key_env: name of the env var holding the private key
        known_hosts_env: name of the env var holding the known_hosts data

    Returns:
        Path to the cloned/updated repository.

    Raises:
        ValueError: if git_url is not an SSH URL or a required env var is missing.
        RuntimeError: if the target path exists but is not a git repository.
        subprocess.CalledProcessError: if any git command fails.
    """
    if not git_url.startswith("git@"):
        raise ValueError("git_url must be an SSH URL like 'git@github.com:org/repo.git'")

    priv_key = os.environ.get(ssh_key_env)
    kh_data = os.environ.get(known_hosts_env)
    if not priv_key:
        raise ValueError(f"Missing env var {ssh_key_env}")
    if not kh_data:
        raise ValueError(f"Missing env var {known_hosts_env}")

    base = Path(clone_base_path).expanduser().resolve()
    base.mkdir(parents=True, exist_ok=True)
    repo_path = base / repo_dir_name

    # Secure temp dir for SSH material
    tmpdir = Path(tempfile.mkdtemp(prefix="git_ssh_"))
    key_path = tmpdir / "id_rsa"
    kh_path = tmpdir / "known_hosts"

    try:
        # OpenSSH rejects key files that lack a trailing newline; many secret
        # stores strip it, so restore it defensively before writing.
        if not priv_key.endswith("\n"):
            priv_key += "\n"
        key_path.write_text(priv_key, encoding="utf-8")
        kh_path.write_text(kh_data, encoding="utf-8")

        try:
            os.chmod(key_path, 0o600)  # ssh refuses group/world-readable keys
        except PermissionError:
            pass  # Windows quirk

        # Quote the file paths so temp dirs containing spaces don't break
        # the shell-parsed GIT_SSH_COMMAND.
        ssh_cmd = (
            f'ssh -i "{key_path}" -o IdentitiesOnly=yes '
            f'-o UserKnownHostsFile="{kh_path}" '
            f"-o StrictHostKeyChecking=yes -o StrictModes=no"
        )

        env = os.environ.copy()
        env["GIT_SSH_COMMAND"] = ssh_cmd

        if not repo_path.exists():
            cmd = ["git", "clone"]
            if depth and depth > 0:
                cmd += ["--depth", str(depth)]
            cmd += [git_url, str(repo_path)]
            subprocess.check_call(cmd, env=env)
        else:
            if not (repo_path / ".git").exists():
                raise RuntimeError(f"Path exists but is not a git repo: {repo_path}")
            subprocess.check_call(["git", "remote", "set-url", "origin", git_url], cwd=repo_path, env=env)
            subprocess.check_call(["git", "fetch", "--all", "--prune"], cwd=repo_path, env=env)
            subprocess.check_call(["git", "pull", "--ff-only", "origin"], cwd=repo_path, env=env)

        return repo_path

    finally:
        # Best-effort cleanup of the key material, even on failure.
        try:
            shutil.rmtree(tmpdir)
        except Exception:
            pass
def clone_pat_repo(git_pat, git_url, clone_base_path):
    """
    Clone a Git repo over HTTPS using a personal access token (PAT).

    Args:
        git_pat: GitHub personal access token used for authentication.
        git_url: HTTPS URL of the repository (must start with 'https://').
        clone_base_path: directory under which 'manifest-repo' will be created.

    Returns:
        Path (str) to the repository directory.

    Raises:
        ValueError: if git_pat/git_url is missing, or git_url is not HTTPS.
    """
    access_token = git_pat
    repo_url = git_url

    # Fixed misleading message: these values come from parameters, not env vars.
    if not access_token or not repo_url:
        raise ValueError("git_pat and git_url must be provided")

    # Embed the PAT into the HTTPS URL for authentication.
    # SECURITY NOTE: the token ends up in the clone's .git/config remote URL;
    # avoid sharing or persisting the clone beyond its immediate use.
    if 'https://' in repo_url:
        parts = repo_url.split('https://', 1)
        repo_url = f'https://{access_token}@{parts[1]}'
    else:
        raise ValueError("URL must begin with https:// for PAT authentication")

    # Directory where the repo will be cloned
    repo_path = os.path.join(clone_base_path, 'manifest-repo')

    # Clone only when absent; an existing clone is left as-is (no fetch/pull).
    if not os.path.exists(repo_path):
        logger.info(f"Cloning repository into {repo_path}")
        Repo.clone_from(repo_url, repo_path)
        logger.info("Repository cloned successfully.")
    else:
        logger.info(f"Repository already exists at {repo_path}")

    # Return the path (consistent with clone_ssh_repo); previously returned None.
    return repo_path
|
|
@@ -1,126 +1,126 @@
|
|
|
1
|
-
import requests
|
|
2
|
-
from datetime import datetime, timedelta
|
|
3
|
-
import pytz
|
|
4
|
-
|
|
5
|
-
from prefect import get_run_logger
|
|
6
|
-
|
|
7
|
-
logger = get_run_logger()
|
|
8
|
-
|
|
9
|
-
def parse_datetime(dt_str):
|
|
10
|
-
"""Parse a datetime string into a datetime object."""
|
|
11
|
-
return datetime.strptime(dt_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=pytz.UTC)
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
def add_version_dependency(versions):
|
|
15
|
-
"""
|
|
16
|
-
Finds untagged versions that were created within 2 minutes of any tagged version.
|
|
17
|
-
|
|
18
|
-
Args:
|
|
19
|
-
versions (list of dict): List of dictionaries, each containing 'created_at' and possibly 'tags'.
|
|
20
|
-
|
|
21
|
-
Returns:
|
|
22
|
-
list: A list of untagged versions that meet the criteria.
|
|
23
|
-
"""
|
|
24
|
-
tagged_versions = [v for v in versions if v['metadata']['container']['tags']]
|
|
25
|
-
untagged_versions = [v for v in versions if not v['metadata']['container']['tags']]
|
|
26
|
-
|
|
27
|
-
# Convert all creation times to datetime objects
|
|
28
|
-
for v in versions:
|
|
29
|
-
v['created_datetime'] = parse_datetime(v['created_at'])
|
|
30
|
-
|
|
31
|
-
# Check each untagged version against all tagged versions
|
|
32
|
-
for v in versions:
|
|
33
|
-
if v in untagged_versions:
|
|
34
|
-
for tagged in tagged_versions:
|
|
35
|
-
time_diff = abs(tagged['created_datetime'] - v['created_datetime'])
|
|
36
|
-
if time_diff < timedelta(minutes=2):
|
|
37
|
-
v['tag'] = tagged['tag']
|
|
38
|
-
break # Stop checking once a close tagged version is found
|
|
39
|
-
|
|
40
|
-
return versions
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
def get_github_package_versions(token, organization, package_type, package_name):
|
|
44
|
-
"""
|
|
45
|
-
This function returns all available versions in a github package registry `ghcr`.
|
|
46
|
-
|
|
47
|
-
:param: token: GitHub token with proper permissions
|
|
48
|
-
:param: organization: GitHub organization name
|
|
49
|
-
:param: package_type: GitHub package type (e.g. container, npm)
|
|
50
|
-
:param: package_name: GitHub package name
|
|
51
|
-
"""
|
|
52
|
-
base_url = f"https://api.github.com/orgs/{organization}/packages/{package_type}/{package_name}/versions"
|
|
53
|
-
headers = {
|
|
54
|
-
'Authorization': f'token {token}',
|
|
55
|
-
'Accept': 'application/vnd.github.v3+json'
|
|
56
|
-
}
|
|
57
|
-
versions = []
|
|
58
|
-
url = base_url
|
|
59
|
-
|
|
60
|
-
while url:
|
|
61
|
-
logger.info(f"Requesting: {url}") # Debug output to check the URL being requested
|
|
62
|
-
response = requests.get(url, headers=headers)
|
|
63
|
-
if response.status_code == 200:
|
|
64
|
-
page_versions = response.json()
|
|
65
|
-
versions.extend(page_versions)
|
|
66
|
-
link_header = response.headers.get('Link', None)
|
|
67
|
-
if link_header:
|
|
68
|
-
links = {rel.split('; ')[1][5:-1]: rel.split('; ')[0][1:-1] for rel in link_header.split(', ')}
|
|
69
|
-
url = links.get("next", None) # Get the URL for the next page
|
|
70
|
-
if url:
|
|
71
|
-
logger.info(f"Next page link found: {url}") # Debug output to check the next page link
|
|
72
|
-
else:
|
|
73
|
-
logger.info("No next page link found in header.") # End of pagination
|
|
74
|
-
else:
|
|
75
|
-
logger.info("No 'Link' header present, likely the last page.") # If no 'Link' header, it's the last page
|
|
76
|
-
url = None
|
|
77
|
-
else:
|
|
78
|
-
logger.error(f"Failed to retrieve package versions: {response.status_code}, {response.text}")
|
|
79
|
-
url = None
|
|
80
|
-
|
|
81
|
-
for item in versions:
|
|
82
|
-
tags = item['metadata']['container']['tags']
|
|
83
|
-
item['tag'] = tags[0] if len(tags) > 0 else None
|
|
84
|
-
|
|
85
|
-
return versions
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
def delete_versions(versions, token, organization, package_type, package_name):
|
|
89
|
-
"""
|
|
90
|
-
:param: versions: list of versions to be deleted
|
|
91
|
-
:param: token: GitHub token with proper permissions
|
|
92
|
-
:param: organization: GitHub organization name
|
|
93
|
-
:param: package_type: GitHub package type (e.g. container, npm)
|
|
94
|
-
:param: package_name: GitHub package name
|
|
95
|
-
"""
|
|
96
|
-
headers = {
|
|
97
|
-
'Authorization': f'token {token}',
|
|
98
|
-
'Accept': 'application/vnd.github.v3+json'
|
|
99
|
-
}
|
|
100
|
-
for version in versions:
|
|
101
|
-
delete_url = f"https://api.github.com/orgs/{organization}/packages/{package_type}/{package_name}/versions/{version['id']}"
|
|
102
|
-
response = requests.delete(delete_url, headers=headers)
|
|
103
|
-
if response.status_code == 204:
|
|
104
|
-
logger.info(f"Successfully deleted version: {version['metadata']['container']['tags']} (ID: {version['id']})")
|
|
105
|
-
else:
|
|
106
|
-
logger.info(f"Failed to delete version: {version['metadata']['container']['tags']} (ID: {version['id']}), {response.status_code}, {response.text}")
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
def delete_untaged_versions(versions, token, organization, package_type, package_name):
|
|
111
|
-
# Identifying untagged versions that are related to a tagged version
|
|
112
|
-
untag_test = list(filter(lambda ver: ver['tag'] is None, versions))
|
|
113
|
-
logger.info(f"UNTAGGED BEFORE: {len(untag_test)}")
|
|
114
|
-
versions = add_version_dependency(versions)
|
|
115
|
-
untag_vers = list(filter(lambda ver: ver['tag'] is None, versions))
|
|
116
|
-
logger.info(f"UNTAGGED BEFORE: {len(untag_vers)}")
|
|
117
|
-
|
|
118
|
-
delete_versions(untag_vers, token, organization, package_type, package_name)
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
def task_get_latest_image_digest(versions, tag_name):
|
|
122
|
-
lst = list(filter(lambda ver: ver['tag'] == tag_name, versions))
|
|
123
|
-
if len(lst) == 0:
|
|
124
|
-
raise ValueError(f"Provided tag: {tag_name} was not found.")
|
|
125
|
-
|
|
126
|
-
return lst[0]['name']
|
|
import requests
from datetime import datetime, timedelta
import pytz

from prefect import get_run_logger

# NOTE(review): get_run_logger() raises MissingContextError when called outside
# a Prefect flow/task run, so importing this module outside a run context will
# fail at import time — confirm this module is only imported from within flows.
logger = get_run_logger()
def parse_datetime(dt_str):
    """Parse a 'YYYY-MM-DDTHH:MM:SSZ' timestamp into a UTC-aware datetime.

    Args:
        dt_str: timestamp string in GitHub's API format, e.g. '2024-01-02T03:04:05Z'.

    Returns:
        A timezone-aware datetime in UTC.

    Raises:
        ValueError: if dt_str does not match the expected format.
    """
    # datetime.timezone.utc is equivalent to pytz.UTC for a fixed +00:00
    # offset and drops the reliance on the legacy pytz package here.
    from datetime import timezone
    return datetime.strptime(dt_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc)
def add_version_dependency(versions):
    """
    Associate untagged versions with a tagged version created within 2 minutes.

    For every untagged version, if some tagged version was created within
    2 minutes of it, the untagged version's 'tag' key is set to that tagged
    version's 'tag' (so it is treated as belonging to that release).

    Args:
        versions (list of dict): version dicts, each with 'created_at' and
            metadata.container.tags; 'tag' is expected to be pre-populated
            (see get_github_package_versions).

    Returns:
        list: the SAME list, mutated in place — 'created_datetime' is added to
        every item, and 'tag' is filled in for untagged versions that matched.
        (The original docstring wrongly claimed only untagged versions were
        returned.)
    """
    tagged_versions = [v for v in versions if v['metadata']['container']['tags']]
    untagged_versions = [v for v in versions if not v['metadata']['container']['tags']]

    # Convert all creation times to datetime objects
    for v in versions:
        v['created_datetime'] = parse_datetime(v['created_at'])

    # Check each untagged version against all tagged versions.
    # Iterating untagged_versions directly replaces the original O(n^2)
    # `for v in versions: if v in untagged_versions` membership scan;
    # same objects, same order, same assignments.
    for v in untagged_versions:
        for tagged in tagged_versions:
            time_diff = abs(tagged['created_datetime'] - v['created_datetime'])
            if time_diff < timedelta(minutes=2):
                v['tag'] = tagged['tag']
                break  # Stop checking once a close tagged version is found

    return versions
def get_github_package_versions(token, organization, package_type, package_name):
    """
    Return all available versions in a github package registry `ghcr`,
    following the paginated REST API ('Link' headers) until the last page.

    :param: token: GitHub token with proper permissions
    :param: organization: GitHub organization name
    :param: package_type: GitHub package type (e.g. container, npm)
    :param: package_name: GitHub package name
    :returns: list of version dicts as returned by the API; each item gets an
        extra 'tag' key set to its first container tag, or None if untagged.
        On a non-200 response, pagination stops and whatever was collected
        so far is returned (the error is logged, not raised).
    """
    base_url = f"https://api.github.com/orgs/{organization}/packages/{package_type}/{package_name}/versions"
    headers = {
        'Authorization': f'token {token}',
        'Accept': 'application/vnd.github.v3+json'
    }
    versions = []
    url = base_url

    while url:
        logger.info(f"Requesting: {url}")  # Debug output to check the URL being requested
        response = requests.get(url, headers=headers)
        if response.status_code == 200:
            page_versions = response.json()
            versions.extend(page_versions)
            link_header = response.headers.get('Link', None)
            if link_header:
                # Parse RFC 5988 'Link' entries of the form '<url>; rel="next"'
                # into {rel: url} by fixed-offset slicing.
                # NOTE(review): this assumes exactly '<url>; rel="x"' with
                # ', ' separators and no extra link parameters — confirm this
                # holds for all GitHub API responses.
                links = {rel.split('; ')[1][5:-1]: rel.split('; ')[0][1:-1] for rel in link_header.split(', ')}
                url = links.get("next", None)  # Get the URL for the next page
                if url:
                    logger.info(f"Next page link found: {url}")  # Debug output to check the next page link
                else:
                    logger.info("No next page link found in header.")  # End of pagination
            else:
                logger.info("No 'Link' header present, likely the last page.")  # If no 'Link' header, it's the last page
                url = None
        else:
            logger.error(f"Failed to retrieve package versions: {response.status_code}, {response.text}")
            url = None

    # Convenience field: first container tag, or None for untagged versions.
    for item in versions:
        tags = item['metadata']['container']['tags']
        item['tag'] = tags[0] if len(tags) > 0 else None

    return versions
def delete_versions(versions, token, organization, package_type, package_name):
    """
    Delete the given package versions from the GitHub package registry.

    Failures are logged and skipped; no exception is raised.

    :param: versions: list of versions to be deleted (each needs 'id' and
        metadata.container.tags)
    :param: token: GitHub token with proper permissions
    :param: organization: GitHub organization name
    :param: package_type: GitHub package type (e.g. container, npm)
    :param: package_name: GitHub package name
    """
    headers = {
        'Authorization': f'token {token}',
        'Accept': 'application/vnd.github.v3+json'
    }
    for version in versions:
        delete_url = f"https://api.github.com/orgs/{organization}/packages/{package_type}/{package_name}/versions/{version['id']}"
        response = requests.delete(delete_url, headers=headers)
        if response.status_code == 204:
            logger.info(f"Successfully deleted version: {version['metadata']['container']['tags']} (ID: {version['id']})")
        else:
            # Log failures at error level (was logger.info), consistent with
            # get_github_package_versions' error handling.
            logger.error(f"Failed to delete version: {version['metadata']['container']['tags']} (ID: {version['id']}), {response.status_code}, {response.text}")
def delete_untaged_versions(versions, token, organization, package_type, package_name):
    """
    Delete untagged versions that are not associated with any tagged release.

    add_version_dependency first assigns a 'tag' to untagged versions created
    within 2 minutes of a tagged one; anything still untagged afterwards is
    deleted.

    NOTE: the 'untaged' spelling is kept for backward compatibility with callers.

    :param: versions: list of version dicts with 'tag' pre-populated
    :param: token: GitHub token with proper permissions
    :param: organization: GitHub organization name
    :param: package_type: GitHub package type (e.g. container, npm)
    :param: package_name: GitHub package name
    """
    # Identifying untagged versions that are related to a tagged version
    untag_test = list(filter(lambda ver: ver['tag'] is None, versions))
    logger.info(f"UNTAGGED BEFORE: {len(untag_test)}")
    versions = add_version_dependency(versions)
    untag_vers = list(filter(lambda ver: ver['tag'] is None, versions))
    # Fixed copy-paste bug: this line previously logged "UNTAGGED BEFORE" twice.
    logger.info(f"UNTAGGED AFTER: {len(untag_vers)}")

    delete_versions(untag_vers, token, organization, package_type, package_name)
def task_get_latest_image_digest(versions, tag_name):
    """Return the digest ('name') of the first version whose 'tag' equals tag_name.

    :param: versions: list of version dicts, each with 'tag' and 'name' keys
    :param: tag_name: tag to look up
    :raises ValueError: if no version carries the requested tag
    """
    # Scan in order and return on the first match, mirroring the original
    # filter-then-take-first behavior.
    for version in versions:
        if version['tag'] == tag_name:
            return version['name']
    raise ValueError(f"Provided tag: {tag_name} was not found.")