service-forge 0.1.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of service-forge might be problematic. Click here for more details.
- service_forge/api/deprecated_websocket_api.py +86 -0
- service_forge/api/deprecated_websocket_manager.py +425 -0
- service_forge/api/http_api.py +152 -0
- service_forge/api/http_api_doc.py +455 -0
- service_forge/api/kafka_api.py +126 -0
- service_forge/api/routers/feedback/feedback_router.py +148 -0
- service_forge/api/routers/service/service_router.py +127 -0
- service_forge/api/routers/websocket/websocket_manager.py +83 -0
- service_forge/api/routers/websocket/websocket_router.py +78 -0
- service_forge/api/task_manager.py +141 -0
- service_forge/current_service.py +14 -0
- service_forge/db/__init__.py +1 -0
- service_forge/db/database.py +237 -0
- service_forge/db/migrations/feedback_migration.py +154 -0
- service_forge/db/models/__init__.py +0 -0
- service_forge/db/models/feedback.py +33 -0
- service_forge/llm/__init__.py +67 -0
- service_forge/llm/llm.py +56 -0
- service_forge/model/__init__.py +0 -0
- service_forge/model/feedback.py +30 -0
- service_forge/model/websocket.py +13 -0
- service_forge/proto/foo_input.py +5 -0
- service_forge/service.py +280 -0
- service_forge/service_config.py +44 -0
- service_forge/sft/cli.py +91 -0
- service_forge/sft/cmd/config_command.py +67 -0
- service_forge/sft/cmd/deploy_service.py +123 -0
- service_forge/sft/cmd/list_tars.py +41 -0
- service_forge/sft/cmd/service_command.py +149 -0
- service_forge/sft/cmd/upload_service.py +36 -0
- service_forge/sft/config/injector.py +129 -0
- service_forge/sft/config/injector_default_files.py +131 -0
- service_forge/sft/config/sf_metadata.py +30 -0
- service_forge/sft/config/sft_config.py +200 -0
- service_forge/sft/file/__init__.py +0 -0
- service_forge/sft/file/ignore_pattern.py +80 -0
- service_forge/sft/file/sft_file_manager.py +107 -0
- service_forge/sft/kubernetes/kubernetes_manager.py +257 -0
- service_forge/sft/util/assert_util.py +25 -0
- service_forge/sft/util/logger.py +16 -0
- service_forge/sft/util/name_util.py +8 -0
- service_forge/sft/util/yaml_utils.py +57 -0
- service_forge/storage/__init__.py +5 -0
- service_forge/storage/feedback_storage.py +245 -0
- service_forge/utils/__init__.py +0 -0
- service_forge/utils/default_type_converter.py +12 -0
- service_forge/utils/register.py +39 -0
- service_forge/utils/type_converter.py +99 -0
- service_forge/utils/workflow_clone.py +124 -0
- service_forge/workflow/__init__.py +1 -0
- service_forge/workflow/context.py +14 -0
- service_forge/workflow/edge.py +24 -0
- service_forge/workflow/node.py +184 -0
- service_forge/workflow/nodes/__init__.py +8 -0
- service_forge/workflow/nodes/control/if_node.py +29 -0
- service_forge/workflow/nodes/control/switch_node.py +28 -0
- service_forge/workflow/nodes/input/console_input_node.py +26 -0
- service_forge/workflow/nodes/llm/query_llm_node.py +41 -0
- service_forge/workflow/nodes/nested/workflow_node.py +28 -0
- service_forge/workflow/nodes/output/kafka_output_node.py +27 -0
- service_forge/workflow/nodes/output/print_node.py +29 -0
- service_forge/workflow/nodes/test/if_console_input_node.py +33 -0
- service_forge/workflow/nodes/test/time_consuming_node.py +62 -0
- service_forge/workflow/port.py +89 -0
- service_forge/workflow/trigger.py +28 -0
- service_forge/workflow/triggers/__init__.py +6 -0
- service_forge/workflow/triggers/a2a_api_trigger.py +257 -0
- service_forge/workflow/triggers/fast_api_trigger.py +201 -0
- service_forge/workflow/triggers/kafka_api_trigger.py +47 -0
- service_forge/workflow/triggers/once_trigger.py +23 -0
- service_forge/workflow/triggers/period_trigger.py +29 -0
- service_forge/workflow/triggers/websocket_api_trigger.py +189 -0
- service_forge/workflow/workflow.py +227 -0
- service_forge/workflow/workflow_callback.py +141 -0
- service_forge/workflow/workflow_config.py +66 -0
- service_forge/workflow/workflow_event.py +15 -0
- service_forge/workflow/workflow_factory.py +246 -0
- service_forge/workflow/workflow_group.py +51 -0
- service_forge/workflow/workflow_type.py +52 -0
- service_forge-0.1.18.dist-info/METADATA +98 -0
- service_forge-0.1.18.dist-info/RECORD +83 -0
- service_forge-0.1.18.dist-info/WHEEL +4 -0
- service_forge-0.1.18.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import fnmatch
|
|
2
|
+
import re
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import List
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class IgnorePattern:
    """Gitignore-style matcher built from .sftignore pattern lines.

    Supported syntax:
      * blank lines and lines starting with '#' are skipped
      * a leading '!' negates the pattern (re-includes a previously ignored path)
      * a trailing '/' restricts the pattern to directories
      * '**' wildcards are matched against the whole base-relative path
      * plain patterns match any single path component
    """

    def __init__(self, patterns: List[str], base_path: Path):
        """Parse raw pattern lines; matching is done relative to *base_path*."""
        self.base_path = base_path.resolve()
        # Each entry is (normalized_pattern, is_negation).
        self.patterns: List[tuple[str, bool]] = []

        for pattern in patterns:
            pattern = pattern.strip()
            if not pattern or pattern.startswith('#'):
                continue

            is_negation = pattern.startswith('!')
            if is_negation:
                pattern = pattern[1:].strip()

            # Normalize Windows separators; patterns always use '/'.
            pattern = pattern.replace('\\', '/')

            # A leading '/' anchors to the base path; drop it since we always
            # match against base-relative paths anyway.
            if pattern.startswith('/'):
                pattern = pattern[1:]

            self.patterns.append((pattern, is_negation))

    def should_ignore(self, file_path: Path) -> bool:
        """Return True if *file_path* (inside base_path) should be ignored.

        Later patterns win, so a '!' negation can re-include a file that an
        earlier pattern matched. Paths outside base_path are never ignored.
        """
        file_path = file_path.resolve()

        try:
            relative_path = file_path.relative_to(self.base_path)
        except ValueError:
            # Not under base_path at all.
            return False

        path_str = str(relative_path).replace('\\', '/')

        ignored = False
        for pattern, is_negation in self.patterns:
            if self._match_pattern(pattern, path_str, file_path):
                ignored = not is_negation

        return ignored

    def _match_pattern(self, pattern: str, path_str: str, file_path: Path) -> bool:
        """Match one normalized pattern against one base-relative path string."""
        if pattern.endswith('/'):
            # Directory-only pattern: strip the slash and require a directory.
            pattern = pattern[:-1]
            if not file_path.is_dir():
                return False

        if '**' in pattern:
            # BUG FIX: the old code substituted '**' -> '.*' and then ran the
            # result through fnmatch.translate(), which escaped the '.' into a
            # literal '\.' — so '**' patterns could never match anything.
            # fnmatch's '*' already matches across '/', so collapsing '**' to
            # '*' produces the intended "match anything incl. separators" regex.
            regex_pattern = fnmatch.translate(pattern.replace('**', '*'))
            return bool(re.match(regex_pattern, path_str))

        if '/' in pattern:
            # A pattern containing a slash matches the full relative path.
            return fnmatch.fnmatch(path_str, pattern)

        # A bare pattern matches any single path component.
        path_parts = path_str.split('/')
        return any(fnmatch.fnmatch(part, pattern) for part in path_parts)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def load_ignore_patterns(project_path: Path) -> IgnorePattern:
    """Build an IgnorePattern from the project's .sftignore file.

    Returns an empty matcher when the file is absent or unreadable
    (best-effort: ignore-file problems never abort packaging).
    """
    base = project_path.resolve()
    ignore_file = base / '.sftignore'

    if ignore_file.exists():
        try:
            with open(ignore_file, 'r', encoding='utf-8') as fh:
                return IgnorePattern(fh.readlines(), base)
        except Exception:
            # Fall through to the empty matcher on any read failure.
            pass

    return IgnorePattern([], base)
|
|
80
|
+
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import tarfile
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
import requests
|
|
5
|
+
|
|
6
|
+
from service_forge.sft.file.ignore_pattern import load_ignore_patterns
|
|
7
|
+
from service_forge.sft.config.sft_config import sft_config
|
|
8
|
+
from service_forge.sft.util.logger import log_success, log_info, log_error
|
|
9
|
+
|
|
10
|
+
class SftTarFile:
    """Wraps a packaged service tarball on disk.

    File name format: sf_<project>_<version>.tar, e.g. sf_tag_service_0.0.1.tar.
    """

    def __init__(self, path: Path):
        self.path = path
        self.name = path.name
        # Stat once; reuse for both size and mtime.
        stat = path.stat()
        self.size = stat.st_size
        self.modified_time = stat.st_mtime
        # Middle '_'-joined segments are the project name; the last segment
        # (minus the '.tar' suffix) is the version.
        self.project_name = '_'.join(path.name.split('_')[1:-1])
        self.version = path.name.split('_')[-1][:-4]

    def _format_size(self) -> str:
        """Return the file size as a human-readable string (e.g. '2.00 KB').

        BUG FIX: the original divided self.size in place, so a second call
        (or any later use of self.size) saw a corrupted value. Use a local.
        """
        size = float(self.size)
        for unit in ['B', 'KB', 'MB', 'GB']:
            if size < 1024.0:
                return f"{size:.2f} {unit}"
            size /= 1024.0
        return f"{size:.2f} TB"

    def _format_modified_time(self) -> str:
        """Return the mtime formatted as 'YYYY-MM-DD HH:MM:SS' (local time)."""
        from datetime import datetime
        return datetime.fromtimestamp(self.modified_time).strftime("%Y-%m-%d %H:%M:%S")

    @staticmethod
    def is_valid_path(path: Path) -> bool:
        """True if *path* is an existing file named like an sf tarball."""
        return path.is_file() and path.suffix == '.tar' and path.name.startswith('sf_')
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class SftFileManager:
    """Manages service tarballs stored under <sft_file_root>/service-tar.

    Constructing the manager ensures the tar directory exists and loads the
    current list of tarballs from disk.
    """

    def __init__(self):
        self.tar_path = Path(sft_config.sft_file_root) / "service-tar"
        self.tars: list[SftTarFile] = []
        os.makedirs(self.tar_path, exist_ok=True)
        self.load_tars()

    def create_tar(self, project_path: Path, name: str, version: str) -> Path:
        """Pack *project_path* into sf_<name>_<version>.tar, honoring .sftignore.

        Entries are stored under a top-level "<name>_<version>/" directory
        inside the archive. Returns the path of the created tarball.
        """
        project_path = Path(project_path).resolve()
        tar_path = Path(self.tar_path) / f"sf_{name}_{version}.tar"

        ignore_pattern = load_ignore_patterns(project_path)

        with tarfile.open(tar_path, 'w') as tar:
            for root, dirs, files in os.walk(project_path):
                root_path = Path(root)

                # Prune ignored directories in place so os.walk skips them.
                dirs[:] = [
                    d for d in dirs
                    if not ignore_pattern.should_ignore(root_path / d)
                ]

                for file in files:
                    file_path = root_path / file
                    if ignore_pattern.should_ignore(file_path):
                        continue

                    arcname = file_path.relative_to(project_path)
                    tar.add(file_path, arcname=Path(f"{name}_{version}") / arcname)
        # Refresh the cached list so the new tarball is visible immediately.
        self.load_tars()
        return tar_path

    def load_tars(self) -> list[SftTarFile]:
        """Rescan the tar directory and return all valid sf_*.tar files."""
        self.tars = [SftTarFile(p) for p in self.tar_path.iterdir() if SftTarFile.is_valid_path(p)]
        return self.tars

    def upload_tar(self, tar_path: Path) -> None:
        """Upload a tarball to <server_url>/api/v1/services/upload-tar.

        Raises:
            FileNotFoundError: if *tar_path* does not exist.
            Exception: on transport failure or any server-reported error.
        """
        if not tar_path.exists():
            raise FileNotFoundError(f"File not found: {tar_path}")

        upload_url = f"{sft_config.server_url}/api/v1/services/upload-tar"

        try:
            with open(tar_path, 'rb') as file:
                files = {'file': (tar_path.name, file)}

                response = requests.post(
                    upload_url,
                    files=files,
                    timeout=sft_config.upload_timeout
                )

            if response.status_code == 200:
                result = response.json()
                # The server wraps its own status in the JSON body's "code" field.
                if result.get('code') == 200:
                    log_success(f"Upload successful: {result.get('message')}")
                    log_info(f"File saved path: {result.get('data', {}).get('file_path')}")
                else:
                    raise Exception(f"Upload failed: {result.get('message')}")
            else:
                try:
                    error_detail = response.json()
                    error_message = error_detail.get('message', f"HTTP错误: {response.status_code}")
                    if 'debug' in error_detail and error_detail['debug']:
                        log_error(f"Error details: {error_detail['debug']}")
                    raise Exception(error_message)
                except ValueError:
                    # Body was not JSON (response.json() raised ValueError):
                    # fall back to reporting just the HTTP status code.
                    raise Exception(f"Server returned error status code: {response.status_code}")
        except requests.exceptions.RequestException as e:
            raise Exception(f"Upload request failed: {str(e)}")
|
|
106
|
+
|
|
107
|
+
# Module-level singleton: importing this module creates the manager
# (which creates the tar directory and scans it as a side effect).
sft_file_manager = SftFileManager()
|
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
from kubernetes.utils.create_from_yaml import FailToCreateError
|
|
3
|
+
import threading
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
import yaml
|
|
6
|
+
from kubernetes import client, config, utils
|
|
7
|
+
from kubernetes.client.rest import ApiException
|
|
8
|
+
from kubernetes.dynamic import DynamicClient
|
|
9
|
+
from kubernetes.dynamic.exceptions import NotFoundError
|
|
10
|
+
from service_forge.sft.util.logger import log_error, log_info, log_success, log_warning
|
|
11
|
+
|
|
12
|
+
class KubernetesServiceDetails:
    """Plain value object describing a Kubernetes Service.

    Only *name* is required; *type*, *port* and *target_port* stay None when
    the Service spec could not be read.
    """

    def __init__(self, name: str, type: str | None = None, port: int | None = None, target_port: int | None = None):
        # NOTE: `type` shadows the builtin, but it is part of the public
        # signature and must keep its name for keyword callers.
        self.name, self.type = name, type
        self.port, self.target_port = port, target_port
|
|
18
|
+
|
|
19
|
+
class KubernetesManager:
    """Thread-safe singleton wrapper around the Kubernetes Python client.

    Loads in-cluster configuration when running inside a pod, otherwise falls
    back to the local kubeconfig. Exposes helpers for the "sf-" services this
    tool deploys (listing, details, pods, logs, apply, delete).
    """

    _instance_lock = threading.Lock()

    def __init__(self):
        # Prefer in-cluster credentials; fall back to the developer kubeconfig.
        try:
            config.load_incluster_config()
            # Build the DynamicClient from the in-cluster configuration.
            self.dynamic_client = DynamicClient(client.ApiClient())
        except config.ConfigException:
            config.load_kube_config()
            # In-cluster config unavailable: build the DynamicClient from kubeconfig.
            self.dynamic_client = DynamicClient(config.new_client_from_config())

        self.k8s_client = client.CoreV1Api()
        self.k8s_apps_client = client.AppsV1Api()
        self.k8s_batch_client = client.BatchV1Api()
        self.k8s_rbac_client = client.RbacAuthorizationV1Api()
        self.k8s_networking_client = client.NetworkingV1Api()
        self.k8s_apiextensions_client = client.ApiextensionsV1Api()

        # Maps an apiVersion string to the typed client that serves it; other
        # apiVersions (e.g. CRDs) go through the dynamic client instead.
        self.api_mapping = {
            "v1": self.k8s_client,
            "apps/v1": self.k8s_apps_client,
            "batch/v1": self.k8s_batch_client,
            "rbac.authorization.k8s.io/v1": self.k8s_rbac_client,
            "networking.k8s.io/v1": self.k8s_networking_client,
            "apiextensions.k8s.io/v1": self.k8s_apiextensions_client,
        }

    def __new__(cls) -> KubernetesManager:
        # Double-checked locking around a single shared instance.
        # NOTE(review): __init__ still re-runs on every KubernetesManager()
        # call (standard quirk of __new__-based singletons) — preserved as-is.
        if not hasattr(cls, '_instance'):
            with KubernetesManager._instance_lock:
                if not hasattr(cls, '_instance'):
                    KubernetesManager._instance = super().__new__(cls)
        return KubernetesManager._instance

    def get_services_in_namespace(self, namespace: str) -> list[str]:
        """Return names of Services in *namespace* starting with 'sf-' ([] on error)."""
        try:
            services = self.k8s_client.list_namespaced_service(namespace=namespace)
            return [svc.metadata.name for svc in services.items if svc.metadata.name.startswith("sf-")]
        except ApiException as e:
            log_error(f"Failed to get services: {e.reason}")
            return []
        except Exception as e:
            log_error(f"Failed to get services: {e}")
            return []

    def get_service_details(self, namespace: str, service_name: str) -> KubernetesServiceDetails:
        """Read a Service's type and first port; name-only details on error."""
        try:
            service = self.k8s_client.read_namespaced_service(name=service_name, namespace=namespace)
            return KubernetesServiceDetails(
                name=service.metadata.name,
                type=service.spec.type,
                # Only the first declared port is surfaced.
                port=service.spec.ports[0].port,
                target_port=service.spec.ports[0].target_port
            )
        except ApiException as e:
            log_error(f"Failed to get service details: {e.reason}")
            return KubernetesServiceDetails(name=service_name)
        except Exception as e:
            log_error(f"Failed to get service details: {e}")
            return KubernetesServiceDetails(name=service_name)

    def get_pods_for_service(self, namespace: str, service_name: str) -> list[str]:
        """Return pod names selected by the Service's label selector ([] on error)."""
        try:
            service = self.k8s_client.read_namespaced_service(name=service_name, namespace=namespace)
            selector = service.spec.selector
            if not selector:
                log_error(f"Service '{service_name}' has no selector")
                return []
            label_selector = ",".join([f"{k}={v}" for k, v in selector.items()])
            pods = self.k8s_client.list_namespaced_pod(namespace=namespace, label_selector=label_selector)
            return [pod.metadata.name for pod in pods.items]
        except ApiException as e:
            log_error(f"Failed to get pods for service: {e.reason}")
            return []
        except Exception as e:
            log_error(f"Failed to get pods for service: {e}")
            return []

    def get_pod_containers(self, namespace: str, pod_name: str) -> list[str]:
        """Return names of a pod's containers and init containers ([] on error)."""
        try:
            pod = self.k8s_client.read_namespaced_pod(name=pod_name, namespace=namespace)
            containers = []
            if pod.spec.containers:
                containers.extend([c.name for c in pod.spec.containers])
            if pod.spec.init_containers:
                containers.extend([c.name for c in pod.spec.init_containers])
            return containers
        except ApiException as e:
            log_error(f"Failed to get pod containers: {e.reason}")
            return []
        except Exception as e:
            log_error(f"Failed to get pod containers: {e}")
            return []

    def get_pod_logs(self, namespace: str, pod_name: str, container_name: str, tail: int, follow: bool, previous: bool) -> str:
        """Fetch container logs; returns "" on any failure.

        When *follow* is True, tail_lines is disabled and the response is not
        preloaded (a stream is returned by the client).
        """
        try:
            logs = self.k8s_client.read_namespaced_pod_log(
                name=pod_name,
                namespace=namespace,
                container=container_name,
                tail_lines=tail if not follow else None,
                previous=previous,
                follow=follow,
                _preload_content=not follow
            )
            return logs
        except ApiException as e:
            log_error(f"Failed to get pod logs: {e.reason}")
            return ""
        except Exception as e:
            log_error(f"Failed to get pod logs: {e}")
            # BUG FIX: the original fell off the end here, implicitly
            # returning None despite the declared -> str; return "" to match
            # the ApiException branch.
            return ""

    def apply_dynamic_yaml(self, obj: dict, namespace: str) -> None:
        """Create-or-patch a single manifest via the dynamic client (CRDs etc.)."""
        api_version = obj["apiVersion"]
        kind = obj["kind"]
        metadata = obj["metadata"]
        name = metadata["name"]

        resource = self.dynamic_client.resources.get(api_version=api_version, kind=kind)

        try:
            resource.get(name=name, namespace=namespace)
            print(f"{kind}/{name} exists → patching...")
            resource.patch(name=name, namespace=namespace, body=obj, content_type="application/merge-patch+json")

        except NotFoundError:
            print(f"{kind}/{name} not found → creating...")
            resource.create(body=obj, namespace=namespace)

    def apply_deployment_yaml(self, deployment_yaml: Path, namespace: str) -> None:
        """Create-or-patch every document in a multi-doc YAML manifest file.

        Known core apiVersions use the typed clients; anything else falls
        through to apply_dynamic_yaml.

        Raises:
            Exception: for a typed apiVersion whose kind has no
                read_namespaced_* method on that client.
        """
        with open(deployment_yaml, 'r') as f:
            objs = yaml.safe_load_all(f)
            for obj in objs:
                api_version = obj["apiVersion"]
                kind = obj["kind"]
                metadata = obj["metadata"]

                name = metadata["name"]

                api_client = self.api_mapping.get(api_version)
                if not api_client:
                    # Unknown apiVersion (typically a CRD) → dynamic client.
                    self.apply_dynamic_yaml(obj, namespace)
                    continue

                # NOTE(review): kind.lower() only resolves single-word kinds
                # (e.g. Deployment → deployment); multi-word kinds like
                # NetworkPolicy would not resolve and raise below — confirm
                # whether such kinds ever appear in these manifests.
                read_fn = getattr(api_client, f"read_namespaced_{kind.lower()}", None)
                create_fn = getattr(api_client, f"create_namespaced_{kind.lower()}", None)
                patch_fn = getattr(api_client, f"patch_namespaced_{kind.lower()}", None)

                if not read_fn:
                    raise Exception(f"Unsupported resource type: {kind}")

                try:
                    read_fn(name=name, namespace=namespace)
                    print(f"{kind}/{name} exists → patching...")
                    patch_fn(name=name, namespace=namespace, body=obj)

                except ApiException as e:
                    if e.status == 404:
                        print(f"{kind}/{name} not found → creating...")
                        create_fn(namespace=namespace, body=obj)
                    else:
                        raise

    def delete_service(self, namespace: str, service_name: str, force: bool = False) -> None:
        """Best-effort teardown of a deployed service.

        Deletes, in order: the Deployment, the Service, the Traefik
        IngressRoute, and the 'strip-prefix-<name>' Middleware. Each step
        logs and continues on failure so partial deployments can still be
        cleaned up. *force* deletes with grace period 0 and Background
        propagation.
        """
        delete_options = client.V1DeleteOptions()
        if force:
            delete_options.grace_period_seconds = 0
            delete_options.propagation_policy = "Background"

        # Delete deployment
        try:
            log_info(f"Attempting to delete deployment '{service_name}'...")
            self.k8s_apps_client.delete_namespaced_deployment(
                name=service_name,
                namespace=namespace,
                body=delete_options
            )
            log_success(f"Deployment '{service_name}' deleted successfully")
        except ApiException as e:
            if e.status == 404:
                log_warning(f"Deployment '{service_name}' not found, skipping...")
            else:
                log_warning(f"Failed to delete deployment '{service_name}': {e.reason}")
                log_warning("Continuing with service deletion...")
        except Exception as e:
            log_warning(f"Failed to delete deployment '{service_name}': {e}")
            log_warning("Continuing with service deletion...")

        # Delete service
        try:
            log_info(f"Attempting to delete service '{service_name}'...")
            self.k8s_client.delete_namespaced_service(
                name=service_name,
                namespace=namespace,
                body=delete_options
            )
            log_success(f"Service '{service_name}' deleted successfully")
        except ApiException as e:
            if e.status == 404:
                log_warning(f"Service '{service_name}' not found, skipping...")
            else:
                log_error(f"Failed to delete service '{service_name}': {e.reason}")
                if e.body:
                    log_error(f"Error details: {e.body}")
        except Exception as e:
            log_error(f"Failed to delete service '{service_name}': {e}")

        # Delete IngressRoute (Traefik CRD)
        try:
            log_info(f"Attempting to delete IngressRoute '{service_name}'...")
            ingressroute_resource = self.dynamic_client.resources.get(
                api_version="traefik.io/v1alpha1",
                kind="IngressRoute"
            )
            ingressroute_resource.delete(name=service_name, namespace=namespace)
            log_success(f"IngressRoute '{service_name}' deleted successfully")
        except NotFoundError:
            log_warning(f"IngressRoute '{service_name}' not found, skipping...")
        except Exception as e:
            log_warning(f"Failed to delete IngressRoute '{service_name}': {e}")

        # Delete Middleware (Traefik CRD)
        middleware_name = f"strip-prefix-{service_name}"
        try:
            log_info(f"Attempting to delete Middleware '{middleware_name}'...")
            middleware_resource = self.dynamic_client.resources.get(
                api_version="traefik.io/v1alpha1",
                kind="Middleware"
            )
            middleware_resource.delete(name=middleware_name, namespace=namespace)
            log_success(f"Middleware '{middleware_name}' deleted successfully")
        except NotFoundError:
            log_warning(f"Middleware '{middleware_name}' not found, skipping...")
        except Exception as e:
            log_warning(f"Failed to delete Middleware '{middleware_name}': {e}")
|
|
257
|
+
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import typer
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from typing import Callable, TypeVar, Any
|
|
4
|
+
from service_forge.sft.util.logger import log_error, log_info
|
|
5
|
+
from service_forge.sft.config.sf_metadata import load_metadata, SfMetadata
|
|
6
|
+
|
|
7
|
+
T = TypeVar('T')
|
|
8
|
+
|
|
9
|
+
def assert_dir_exists(path: Path) -> None:
    """Log and exit the CLI with code 1 unless *path* is an existing directory."""
    problem = None
    if not path.exists():
        problem = f"Directory does not exist: {path}"
    elif not path.is_dir():
        problem = f"Path is not a directory: {path}"

    if problem is not None:
        log_error(problem)
        raise typer.Exit(1)

    log_info(f"Directory exists: {path}")
|
|
17
|
+
|
|
18
|
+
def assert_file_exists(path: Path) -> None:
    """Log and exit the CLI with code 1 unless *path* is an existing regular file."""
    problem = None
    if not path.exists():
        problem = f"File does not exist: {path}"
    elif not path.is_file():
        problem = f"Path is not a file: {path}"

    if problem is not None:
        log_error(problem)
        raise typer.Exit(1)

    log_info(f"File exists: {path}")
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
from rich.console import Console
|
|
2
|
+
from typing import Any
|
|
3
|
+
|
|
4
|
+
# Shared Rich console used by all log_* helpers in this module.
console = Console()
|
|
5
|
+
|
|
6
|
+
def log_error(message: str, **kwargs: Any) -> None:
    """Print *message* in red on the shared console."""
    styled = f"[red]{message}[/red]"
    console.print(styled, **kwargs)
|
|
8
|
+
|
|
9
|
+
def log_info(message: str, **kwargs: Any) -> None:
    """Print *message* on the shared console with no extra styling."""
    text = f"{message}"
    console.print(text, **kwargs)
|
|
11
|
+
|
|
12
|
+
def log_success(message: str, **kwargs: Any) -> None:
    """Print *message* in green on the shared console."""
    styled = f"[green]{message}[/green]"
    console.print(styled, **kwargs)
|
|
14
|
+
|
|
15
|
+
def log_warning(message: str, **kwargs: Any) -> None:
    """Print *message* in yellow on the shared console."""
    styled = f"[yellow]{message}[/yellow]"
    console.print(styled, **kwargs)
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
def get_metadata_file_name(name: str, version: str) -> str:
    """Return the service metadata file name.

    The arguments are accepted for signature parity with the other naming
    helpers, but the metadata file name is currently a fixed constant.
    """
    return "sf-meta.yaml"
|
|
3
|
+
|
|
4
|
+
def get_service_name(name: str, version: str) -> str:
    """Build the Kubernetes object name: 'sf-<name>-<dashed version>v'.

    Dots in the version become dashes. NOTE(review): the trailing 'v' is kept
    from the original scheme — presumably so the name ends with a letter;
    confirm before changing.
    """
    dashed_version = version.replace('.', '-')
    return f"sf-{name}-{dashed_version}v"
|
|
6
|
+
|
|
7
|
+
def get_service_url_name(name: str, version: str) -> str:
    """Build the URL path segment for a service: '<name>-<dashed version>'."""
    dashed_version = version.replace('.', '-')
    return f"{name}-{dashed_version}"
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import yaml
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from typing import Any, Dict
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def extract_yaml_content_without_comments(yaml_file: Path) -> str:
    """Read a YAML file and return its content re-serialized without comments.

    yaml.safe_load already discards comments, so the primary path simply
    round-trips the raw text through the parser. The original implementation
    pre-stripped every line whose stripped form started with '#' BEFORE
    parsing, which corrupted block scalars containing comment-like lines;
    that naive filtering is now used only as a fallback when the file does
    not parse as YAML (preserving the original fallback behavior).

    Args:
        yaml_file: path to the YAML file.

    Returns:
        Comment-free YAML text, or "" for an empty document.
    """
    with open(yaml_file, 'r', encoding='utf-8') as f:
        content = f.read()

    try:
        data = yaml.safe_load(content)
    except yaml.YAMLError:
        # Invalid YAML: fall back to dropping blank lines and full-line
        # comments, returning whatever remains.
        kept = [
            line for line in content.split('\n')
            if line.strip() and not line.strip().startswith('#')
        ]
        return '\n'.join(kept)

    if data is None:
        return ""
    return yaml.dump(data, default_flow_style=False, allow_unicode=True, sort_keys=False)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def load_sf_metadata_as_string(metadata_file: Path) -> str:
    """Load an sf-meta.yaml file and return its comment-free YAML text.

    Args:
        metadata_file: path to the sf-meta.yaml file.

    Returns:
        The YAML content with comments stripped.

    Raises:
        FileNotFoundError: if *metadata_file* does not exist.
    """
    if not metadata_file.exists():
        raise FileNotFoundError(f"Metadata file not found: {metadata_file}")
    return extract_yaml_content_without_comments(metadata_file)
|