service-forge 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64) hide show
  1. service_forge/api/http_api.py +138 -0
  2. service_forge/api/kafka_api.py +126 -0
  3. service_forge/api/task_manager.py +141 -0
  4. service_forge/api/websocket_api.py +86 -0
  5. service_forge/api/websocket_manager.py +425 -0
  6. service_forge/db/__init__.py +1 -0
  7. service_forge/db/database.py +119 -0
  8. service_forge/llm/__init__.py +62 -0
  9. service_forge/llm/llm.py +56 -0
  10. service_forge/main.py +121 -0
  11. service_forge/model/__init__.py +0 -0
  12. service_forge/model/websocket.py +13 -0
  13. service_forge/proto/foo_input.py +5 -0
  14. service_forge/service.py +111 -0
  15. service_forge/service_config.py +115 -0
  16. service_forge/sft/cli.py +91 -0
  17. service_forge/sft/cmd/config_command.py +67 -0
  18. service_forge/sft/cmd/deploy_service.py +124 -0
  19. service_forge/sft/cmd/list_tars.py +41 -0
  20. service_forge/sft/cmd/service_command.py +149 -0
  21. service_forge/sft/cmd/upload_service.py +36 -0
  22. service_forge/sft/config/injector.py +87 -0
  23. service_forge/sft/config/injector_default_files.py +97 -0
  24. service_forge/sft/config/sf_metadata.py +30 -0
  25. service_forge/sft/config/sft_config.py +125 -0
  26. service_forge/sft/file/__init__.py +0 -0
  27. service_forge/sft/file/ignore_pattern.py +80 -0
  28. service_forge/sft/file/sft_file_manager.py +107 -0
  29. service_forge/sft/kubernetes/kubernetes_manager.py +257 -0
  30. service_forge/sft/util/assert_util.py +25 -0
  31. service_forge/sft/util/logger.py +16 -0
  32. service_forge/sft/util/name_util.py +2 -0
  33. service_forge/utils/__init__.py +0 -0
  34. service_forge/utils/default_type_converter.py +12 -0
  35. service_forge/utils/register.py +39 -0
  36. service_forge/utils/type_converter.py +74 -0
  37. service_forge/workflow/__init__.py +1 -0
  38. service_forge/workflow/context.py +13 -0
  39. service_forge/workflow/edge.py +31 -0
  40. service_forge/workflow/node.py +179 -0
  41. service_forge/workflow/nodes/__init__.py +7 -0
  42. service_forge/workflow/nodes/control/if_node.py +29 -0
  43. service_forge/workflow/nodes/input/console_input_node.py +26 -0
  44. service_forge/workflow/nodes/llm/query_llm_node.py +41 -0
  45. service_forge/workflow/nodes/nested/workflow_node.py +28 -0
  46. service_forge/workflow/nodes/output/kafka_output_node.py +27 -0
  47. service_forge/workflow/nodes/output/print_node.py +29 -0
  48. service_forge/workflow/nodes/test/if_console_input_node.py +33 -0
  49. service_forge/workflow/nodes/test/time_consuming_node.py +61 -0
  50. service_forge/workflow/port.py +86 -0
  51. service_forge/workflow/trigger.py +20 -0
  52. service_forge/workflow/triggers/__init__.py +4 -0
  53. service_forge/workflow/triggers/fast_api_trigger.py +125 -0
  54. service_forge/workflow/triggers/kafka_api_trigger.py +44 -0
  55. service_forge/workflow/triggers/once_trigger.py +20 -0
  56. service_forge/workflow/triggers/period_trigger.py +26 -0
  57. service_forge/workflow/workflow.py +251 -0
  58. service_forge/workflow/workflow_factory.py +227 -0
  59. service_forge/workflow/workflow_group.py +23 -0
  60. service_forge/workflow/workflow_type.py +52 -0
  61. service_forge-0.1.0.dist-info/METADATA +93 -0
  62. service_forge-0.1.0.dist-info/RECORD +64 -0
  63. service_forge-0.1.0.dist-info/WHEEL +4 -0
  64. service_forge-0.1.0.dist-info/entry_points.txt +2 -0
@@ -0,0 +1,107 @@
1
+ import os
2
+ import tarfile
3
+ from pathlib import Path
4
+ import requests
5
+
6
+ from service_forge.sft.file.ignore_pattern import load_ignore_patterns
7
+ from service_forge.sft.config.sft_config import sft_config
8
+ from service_forge.sft.util.logger import log_success, log_info, log_error
9
+
10
class SftTarFile:
    """A service archive on disk, named sf_<project>_<version>.tar.

    example: sf_tag_service_0.0.1.tar
    """

    def __init__(self, path: Path):
        self.path = path
        self.name = path.name
        # Capture file metadata once at construction time.
        stat = path.stat()
        self.size = stat.st_size
        self.modified_time = stat.st_mtime
        # sf_<project>_<version>.tar — the project part may itself contain '_'.
        self.project_name = '_'.join(path.name.split('_')[1:-1])
        # Strip the trailing ".tar" from the last underscore-separated part.
        self.version = path.name.split('_')[-1][:-4]

    def _format_size(self) -> str:
        """Return the size as a human-readable string (B/KB/MB/GB/TB).

        Works on a local copy: the previous version divided self.size in
        place, so a second call returned a wrong (already-scaled) value.
        """
        size = float(self.size)
        for unit in ['B', 'KB', 'MB', 'GB']:
            if size < 1024.0:
                return f"{size:.2f} {unit}"
            size /= 1024.0
        return f"{size:.2f} TB"

    def _format_modified_time(self) -> str:
        """Return the modification time as 'YYYY-MM-DD HH:MM:SS' (local time)."""
        from datetime import datetime
        return datetime.fromtimestamp(self.modified_time).strftime("%Y-%m-%d %H:%M:%S")

    @staticmethod
    def is_valid_path(path: Path) -> bool:
        """True when *path* is an existing file matching the sf_*.tar scheme."""
        return path.is_file() and path.suffix == '.tar' and path.name.startswith('sf_')
34
+
35
+
36
class SftFileManager:
    """Creates, lists, and uploads service tar archives under the sft file root."""

    def __init__(self):
        self.tar_path = Path(sft_config.sft_file_root) / "service-tar"
        self.tars: list[SftTarFile] = []
        os.makedirs(self.tar_path, exist_ok=True)
        self.load_tars()

    def create_tar(self, project_path: Path, name: str, version: str) -> Path:
        """Pack the project directory into sf_<name>_<version>.tar.

        Ignore rules are loaded from the project and applied to both
        directories (pruned during the walk) and individual files.
        """
        source_root = Path(project_path).resolve()
        archive_path = Path(self.tar_path) / f"sf_{name}_{version}.tar"
        patterns = load_ignore_patterns(source_root)
        prefix = Path(f"{name}_{version}")

        with tarfile.open(archive_path, 'w') as archive:
            for current, subdirs, filenames in os.walk(source_root):
                current_path = Path(current)

                # Prune ignored directories in place so os.walk skips them.
                subdirs[:] = [
                    d for d in subdirs
                    if not patterns.should_ignore(current_path / d)
                ]

                for filename in filenames:
                    candidate = current_path / filename
                    if patterns.should_ignore(candidate):
                        continue
                    relative = candidate.relative_to(source_root)
                    # Archive entries live under a <name>_<version>/ prefix.
                    archive.add(candidate, arcname=prefix / relative)
        self.load_tars()
        return archive_path

    def load_tars(self) -> list[SftTarFile]:
        """Rescan the tar directory and refresh self.tars."""
        found = []
        for entry in self.tar_path.iterdir():
            if SftTarFile.is_valid_path(entry):
                found.append(SftTarFile(entry))
        self.tars = found
        return self.tars

    def upload_tar(self, tar_path: Path) -> None:
        """POST the tar archive to the configured sft server.

        Raises FileNotFoundError when the archive is missing, and Exception
        for transport failures or server-side error responses.
        """
        if not tar_path.exists():
            raise FileNotFoundError(f"File not found: {tar_path}")

        upload_url = f"{sft_config.server_url}/api/v1/services/upload-tar"

        try:
            with open(tar_path, 'rb') as fh:
                payload = {'file': (tar_path.name, fh)}
                response = requests.post(
                    upload_url,
                    files=payload,
                    timeout=sft_config.upload_timeout
                )

            if response.status_code != 200:
                try:
                    detail = response.json()
                    message = detail.get('message', f"HTTP错误: {response.status_code}")
                    if 'debug' in detail and detail['debug']:
                        log_error(f"Error details: {detail['debug']}")
                    raise Exception(message)
                except ValueError:
                    # Body was not JSON at all.
                    raise Exception(f"Server returned error status code: {response.status_code}")

            result = response.json()
            if result.get('code') == 200:
                log_success(f"Upload successful: {result.get('message')}")
                log_info(f"File saved path: {result.get('data', {}).get('file_path')}")
            else:
                raise Exception(f"Upload failed: {result.get('message')}")
        except requests.exceptions.RequestException as e:
            raise Exception(f"Upload request failed: {str(e)}")
106
+
107
# Module-level singleton shared by the sft CLI commands.
sft_file_manager = SftFileManager()
@@ -0,0 +1,257 @@
1
+ from __future__ import annotations
2
+ from kubernetes.utils.create_from_yaml import FailToCreateError
3
+ import threading
4
+ from pathlib import Path
5
+ import yaml
6
+ from kubernetes import client, config, utils
7
+ from kubernetes.client.rest import ApiException
8
+ from kubernetes.dynamic import DynamicClient
9
+ from kubernetes.dynamic.exceptions import NotFoundError
10
+ from service_forge.sft.util.logger import log_error, log_info, log_success, log_warning
11
+
12
class KubernetesServiceDetails:
    """Lightweight view of a Kubernetes Service: its name plus optional type/ports."""

    def __init__(self, name: str, type: str | None = None, port: int | None = None, target_port: int | None = None):
        # `type` mirrors the k8s field name even though it shadows the builtin
        # within this signature; callers pass it by keyword.
        self.name, self.type = name, type
        self.port, self.target_port = port, target_port
+
19
class KubernetesManager:
    """Thread-safe singleton facade over the Kubernetes Python client.

    Offers read helpers for sf-* services/pods/logs and apply/delete helpers
    for deployment manifests, falling back to the dynamic client for CRDs
    (e.g. Traefik IngressRoute / Middleware).
    """

    _instance_lock = threading.Lock()

    def __init__(self):
        # KubernetesManager() invokes __init__ on every call, even when
        # __new__ returned the cached singleton; guard so the API clients are
        # built only once instead of being rebuilt each time.
        if getattr(self, '_initialized', False):
            return
        try:
            config.load_incluster_config()
            # Running inside the cluster: build the DynamicClient from the
            # in-cluster configuration.
            self.dynamic_client = DynamicClient(client.ApiClient())
        except config.ConfigException:
            config.load_kube_config()
            # In-cluster config unavailable: fall back to the local kubeconfig.
            self.dynamic_client = DynamicClient(config.new_client_from_config())

        self.k8s_client = client.CoreV1Api()
        self.k8s_apps_client = client.AppsV1Api()
        self.k8s_batch_client = client.BatchV1Api()
        self.k8s_rbac_client = client.RbacAuthorizationV1Api()
        self.k8s_networking_client = client.NetworkingV1Api()
        self.k8s_apiextensions_client = client.ApiextensionsV1Api()

        # apiVersion string -> typed API client, used by apply_deployment_yaml.
        self.api_mapping = {
            "v1": self.k8s_client,
            "apps/v1": self.k8s_apps_client,
            "batch/v1": self.k8s_batch_client,
            "rbac.authorization.k8s.io/v1": self.k8s_rbac_client,
            "networking.k8s.io/v1": self.k8s_networking_client,
            "apiextensions.k8s.io/v1": self.k8s_apiextensions_client,
        }
        self._initialized = True

    def __new__(cls) -> KubernetesManager:
        # Double-checked locking: only the first concurrent caller allocates.
        if not hasattr(cls, '_instance'):
            with KubernetesManager._instance_lock:
                if not hasattr(cls, '_instance'):
                    KubernetesManager._instance = super().__new__(cls)
        return KubernetesManager._instance

    def get_services_in_namespace(self, namespace: str) -> list[str]:
        """Return names of services in *namespace* starting with "sf-" ([] on failure)."""
        try:
            services = self.k8s_client.list_namespaced_service(namespace=namespace)
            return [svc.metadata.name for svc in services.items if svc.metadata.name.startswith("sf-")]
        except ApiException as e:
            log_error(f"Failed to get services: {e.reason}")
            return []
        except Exception as e:
            log_error(f"Failed to get services: {e}")
            return []

    def get_service_details(self, namespace: str, service_name: str) -> KubernetesServiceDetails:
        """Read a service's type and first port; on failure return a name-only record."""
        try:
            service = self.k8s_client.read_namespaced_service(name=service_name, namespace=namespace)
            return KubernetesServiceDetails(
                name=service.metadata.name,
                type=service.spec.type,
                # Only the first declared port is surfaced.
                port=service.spec.ports[0].port,
                target_port=service.spec.ports[0].target_port
            )
        except ApiException as e:
            log_error(f"Failed to get service details: {e.reason}")
            return KubernetesServiceDetails(name=service_name)
        except Exception as e:
            log_error(f"Failed to get service details: {e}")
            return KubernetesServiceDetails(name=service_name)

    def get_pods_for_service(self, namespace: str, service_name: str) -> list[str]:
        """List pod names matched by the service's label selector ([] on failure)."""
        try:
            service = self.k8s_client.read_namespaced_service(name=service_name, namespace=namespace)
            selector = service.spec.selector
            if not selector:
                log_error(f"Service '{service_name}' has no selector")
                return []
            label_selector = ",".join([f"{k}={v}" for k, v in selector.items()])
            pods = self.k8s_client.list_namespaced_pod(namespace=namespace, label_selector=label_selector)
            return [pod.metadata.name for pod in pods.items]
        except ApiException as e:
            log_error(f"Failed to get pods for service: {e.reason}")
            return []
        except Exception as e:
            log_error(f"Failed to get pods for service: {e}")
            return []

    def get_pod_containers(self, namespace: str, pod_name: str) -> list[str]:
        """Return regular + init container names of a pod ([] on failure)."""
        try:
            pod = self.k8s_client.read_namespaced_pod(name=pod_name, namespace=namespace)
            containers = []
            if pod.spec.containers:
                containers.extend([c.name for c in pod.spec.containers])
            if pod.spec.init_containers:
                containers.extend([c.name for c in pod.spec.init_containers])
            return containers
        except ApiException as e:
            log_error(f"Failed to get pod containers: {e.reason}")
            return []
        except Exception as e:
            log_error(f"Failed to get pod containers: {e}")
            return []

    def get_pod_logs(self, namespace: str, pod_name: str, container_name: str, tail: int, follow: bool, previous: bool) -> str:
        """Fetch container logs; when follow=True the raw streaming response is returned.

        Returns "" on failure instead of raising.
        """
        try:
            logs = self.k8s_client.read_namespaced_pod_log(
                name=pod_name,
                namespace=namespace,
                container=container_name,
                # tail_lines conflicts with streaming, so only set when not following.
                tail_lines=tail if not follow else None,
                previous=previous,
                follow=follow,
                # Keep the raw response object when streaming.
                _preload_content=not follow
            )
            return logs
        except ApiException as e:
            log_error(f"Failed to get pod logs: {e.reason}")
            return ""
        except Exception as e:
            log_error(f"Failed to get pod logs: {e}")
            # Bug fix: this branch previously fell through and returned None.
            return ""

    def apply_dynamic_yaml(self, obj: dict, namespace: str) -> None:
        """Create or merge-patch a single manifest via the dynamic client (handles CRDs)."""
        api_version = obj["apiVersion"]
        kind = obj["kind"]
        metadata = obj["metadata"]
        name = metadata["name"]

        resource = self.dynamic_client.resources.get(api_version=api_version, kind=kind)

        try:
            resource.get(name=name, namespace=namespace)
            print(f"{kind}/{name} exists → patching...")
            resource.patch(name=name, namespace=namespace, body=obj, content_type="application/merge-patch+json")

        except NotFoundError:
            print(f"{kind}/{name} not found → creating...")
            resource.create(body=obj, namespace=namespace)

    def apply_deployment_yaml(self, deployment_yaml: Path, namespace: str) -> None:
        """Apply every document in a multi-doc YAML file (create or patch each).

        Known apiVersions use the typed clients; anything else goes through
        apply_dynamic_yaml.
        """
        with open(deployment_yaml, 'r') as f:
            # safe_load_all is lazy, so the loop must stay inside the `with`.
            objs = yaml.safe_load_all(f)
            for obj in objs:
                api_version = obj["apiVersion"]
                kind = obj["kind"]
                metadata = obj["metadata"]

                name = metadata["name"]

                api_client = self.api_mapping.get(api_version)
                if not api_client:
                    self.apply_dynamic_yaml(obj, namespace)
                    continue

                # NOTE(review): kind.lower() only matches single-word kinds
                # (e.g. "Deployment"); multi-word kinds like ConfigMap resolve
                # to read_namespaced_config_map on the client and would raise
                # below — confirm whether those should fall back to
                # apply_dynamic_yaml instead.
                read_fn = getattr(api_client, f"read_namespaced_{kind.lower()}", None)
                create_fn = getattr(api_client, f"create_namespaced_{kind.lower()}", None)
                patch_fn = getattr(api_client, f"patch_namespaced_{kind.lower()}", None)

                if not read_fn:
                    raise Exception(f"Unsupported resource type: {kind}")

                try:
                    read_fn(name=name, namespace=namespace)
                    print(f"{kind}/{name} exists → patching...")
                    patch_fn(name=name, namespace=namespace, body=obj)

                except ApiException as e:
                    if e.status == 404:
                        print(f"{kind}/{name} not found → creating...")
                        create_fn(namespace=namespace, body=obj)
                    else:
                        raise

    def delete_service(self, namespace: str, service_name: str, force: bool = False) -> None:
        """Best-effort teardown of a service's Deployment, Service, and Traefik CRDs.

        With force=True, deletion uses grace_period_seconds=0 and Background
        propagation. Missing resources are logged and skipped; each stage
        continues even when an earlier one fails.
        """
        delete_options = client.V1DeleteOptions()
        if force:
            delete_options.grace_period_seconds = 0
            delete_options.propagation_policy = "Background"

        # Delete deployment
        try:
            log_info(f"Attempting to delete deployment '{service_name}'...")
            self.k8s_apps_client.delete_namespaced_deployment(
                name=service_name,
                namespace=namespace,
                body=delete_options
            )
            log_success(f"Deployment '{service_name}' deleted successfully")
        except ApiException as e:
            if e.status == 404:
                log_warning(f"Deployment '{service_name}' not found, skipping...")
            else:
                log_warning(f"Failed to delete deployment '{service_name}': {e.reason}")
                log_warning("Continuing with service deletion...")
        except Exception as e:
            log_warning(f"Failed to delete deployment '{service_name}': {e}")
            log_warning("Continuing with service deletion...")

        # Delete service
        try:
            log_info(f"Attempting to delete service '{service_name}'...")
            self.k8s_client.delete_namespaced_service(
                name=service_name,
                namespace=namespace,
                body=delete_options
            )
            log_success(f"Service '{service_name}' deleted successfully")
        except ApiException as e:
            if e.status == 404:
                log_warning(f"Service '{service_name}' not found, skipping...")
            else:
                log_error(f"Failed to delete service '{service_name}': {e.reason}")
                if e.body:
                    log_error(f"Error details: {e.body}")
        except Exception as e:
            log_error(f"Failed to delete service '{service_name}': {e}")

        # Delete IngressRoute (Traefik CRD)
        try:
            log_info(f"Attempting to delete IngressRoute '{service_name}'...")
            ingressroute_resource = self.dynamic_client.resources.get(
                api_version="traefik.io/v1alpha1",
                kind="IngressRoute"
            )
            ingressroute_resource.delete(name=service_name, namespace=namespace)
            log_success(f"IngressRoute '{service_name}' deleted successfully")
        except NotFoundError:
            log_warning(f"IngressRoute '{service_name}' not found, skipping...")
        except Exception as e:
            log_warning(f"Failed to delete IngressRoute '{service_name}': {e}")

        # Delete Middleware (Traefik CRD)
        middleware_name = f"strip-prefix-{service_name}"
        try:
            log_info(f"Attempting to delete Middleware '{middleware_name}'...")
            middleware_resource = self.dynamic_client.resources.get(
                api_version="traefik.io/v1alpha1",
                kind="Middleware"
            )
            middleware_resource.delete(name=middleware_name, namespace=namespace)
            log_success(f"Middleware '{middleware_name}' deleted successfully")
        except NotFoundError:
            log_warning(f"Middleware '{middleware_name}' not found, skipping...")
        except Exception as e:
            log_warning(f"Failed to delete Middleware '{middleware_name}': {e}")
+
@@ -0,0 +1,25 @@
1
+ import typer
2
+ from pathlib import Path
3
+ from typing import Callable, TypeVar, Any
4
+ from service_forge.sft.util.logger import log_error, log_info
5
+ from service_forge.sft.config.sf_metadata import load_metadata, SfMetadata
6
+
7
# Generic type variable; currently unreferenced by the helpers below.
T = TypeVar('T')
8
+
9
def assert_dir_exists(path: Path) -> None:
    """Abort the CLI (exit code 1) unless *path* is an existing directory."""
    if path.exists() and path.is_dir():
        log_info(f"Directory exists: {path}")
        return
    if not path.exists():
        log_error(f"Directory does not exist: {path}")
    else:
        log_error(f"Path is not a directory: {path}")
    raise typer.Exit(1)
17
+
18
def assert_file_exists(path: Path) -> None:
    """Abort the CLI (exit code 1) unless *path* is an existing regular file."""
    if path.exists() and path.is_file():
        log_info(f"File exists: {path}")
        return
    if not path.exists():
        log_error(f"File does not exist: {path}")
    else:
        log_error(f"Path is not a file: {path}")
    raise typer.Exit(1)
@@ -0,0 +1,16 @@
1
+ from rich.console import Console
2
+ from typing import Any
3
+
4
# Shared rich Console instance used by all log_* helpers in this module.
console = Console()
5
+
6
def log_error(message: str, **kwargs: Any) -> None:
    """Print *message* in red; extra kwargs are forwarded to Console.print."""
    console.print(f"[red]{message}[/red]", **kwargs)
8
+
9
def log_info(message: str, **kwargs: Any) -> None:
    """Print *message* with default styling; kwargs go to Console.print."""
    console.print(f"{message}", **kwargs)
11
+
12
def log_success(message: str, **kwargs: Any) -> None:
    """Print *message* in green; extra kwargs are forwarded to Console.print."""
    console.print(f"[green]{message}[/green]", **kwargs)
14
+
15
def log_warning(message: str, **kwargs: Any) -> None:
    """Print *message* in yellow; extra kwargs are forwarded to Console.print."""
    console.print(f"[yellow]{message}[/yellow]", **kwargs)
@@ -0,0 +1,2 @@
1
def get_service_name(name: str, version: str) -> str:
    """Derive the deployed service name, e.g. ("tag", "0.0.1") -> "sf-tag-0-0-1v".

    Dots are invalid in Kubernetes resource names, so the version is
    dash-joined. NOTE(review): the trailing 'v' suffix looks intentional —
    confirm upstream.
    """
    dashed = version.replace('.', '-')
    return "sf-" + name + "-" + dashed + "v"
File without changes
@@ -0,0 +1,12 @@
1
+ from ..utils.type_converter import TypeConverter
2
+ from ..workflow.workflow import Workflow
3
+ from ..api.http_api import fastapi_app
4
+ from ..api.kafka_api import KafkaApp, kafka_app
5
+ from fastapi import FastAPI
6
+ from ..workflow.workflow_type import WorkflowType, workflow_type_register
7
+
8
# Global converter wired with the framework's string-based lookups: workflow
# names, FastAPI/Kafka app references, and registered workflow-type names are
# all resolved from plain strings. Each converter receives the converting
# node as the `node` keyword argument.
type_converter = TypeConverter()
# Resolve a sub-workflow by name from the node's own registry.
type_converter.register(str, Workflow, lambda s, node: node.sub_workflows.get_workflow(s))
# Any string maps to the single shared FastAPI / Kafka app instances.
type_converter.register(str, FastAPI, lambda s, node: fastapi_app)
type_converter.register(str, KafkaApp, lambda s, node: kafka_app)
# Look up a registered workflow type's underlying class by name.
type_converter.register(str, type, lambda s, node: workflow_type_register.items[s].type)
@@ -0,0 +1,39 @@
1
+ from typing import TypeVar, Generic, Any
2
+ from loguru import logger
3
+
4
+ T = TypeVar('T')
5
+
6
+ class Register(Generic[T]):
7
+ def __init__(self) -> None:
8
+ self.items:dict[str, T] = {}
9
+
10
+ def register(
11
+ self,
12
+ name: str,
13
+ item: T,
14
+ show_info_log: bool = False,
15
+ ) -> None:
16
+ if name not in self.items:
17
+ self.items[name] = item
18
+ if show_info_log:
19
+ logger.info(f'Register {name}.')
20
+ else:
21
+ logger.warning(f'{name} has been registered.')
22
+
23
+ def instance(
24
+ self,
25
+ name: str,
26
+ kwargs: dict[str, Any] = {},
27
+ ignore_keys: list[str] = []
28
+ ) -> None:
29
+ for key in ignore_keys:
30
+ try:
31
+ kwargs.pop(key)
32
+ except:
33
+ pass
34
+ if name not in self.items:
35
+ logger.error(f'{name} has not been registered.')
36
+ return self.items[name](**kwargs)
37
+
38
+ def __len__(self) -> int:
39
+ return len(self.items)
@@ -0,0 +1,74 @@
1
+ from typing import Any, Callable, Type, Dict, Tuple, Set, List
2
+ from collections import deque
3
+ import inspect
4
+
5
+ class TypeConverter:
6
+ def __init__(self):
7
+ self._registry: Dict[Tuple[Type, Type], Callable[..., Any]] = {}
8
+
9
+ def register(self, src_type: Type, dst_type: Type, func: Callable[..., Any]):
10
+ self._registry[(src_type, dst_type)] = func
11
+
12
+ def can_convert(self, src_type: Type, dst_type: Type) -> bool:
13
+ return self._find_path(src_type, dst_type) is not None
14
+
15
+ def convert(self, value: Any, dst_type: Type, **kwargs) -> Any:
16
+ if value is None:
17
+ return None
18
+
19
+ if dst_type == Any:
20
+ return value
21
+
22
+ src_type = type(value)
23
+
24
+ if isinstance(value, dst_type):
25
+ return value
26
+
27
+ if (src_type, dst_type) in self._registry:
28
+ return self._call_func(self._registry[(src_type, dst_type)], value, **kwargs)
29
+
30
+ try:
31
+ return dst_type(value)
32
+ except Exception:
33
+ pass
34
+
35
+ path = self._find_path(src_type, dst_type)
36
+ if not path:
37
+ raise TypeError(f"No conversion path found from {src_type.__name__} to {dst_type.__name__}.")
38
+
39
+ result = value
40
+ for i in range(len(path) - 1):
41
+ func = self._registry[(path[i], path[i + 1])]
42
+ result = self._call_func(func, result, **kwargs)
43
+ return result
44
+
45
+ def _call_func(self, func: Callable[..., Any], value: Any, **kwargs) -> Any:
46
+ sig = inspect.signature(func)
47
+ if len(sig.parameters) == 1:
48
+ return func(value)
49
+ else:
50
+ return func(value, **kwargs)
51
+
52
+ def _find_path(self, src_type: Type, dst_type: Type) -> List[Type] | None:
53
+ if src_type == dst_type:
54
+ return [src_type]
55
+
56
+ graph: Dict[Type, Set[Type]] = {}
57
+ for (s, d) in self._registry.keys():
58
+ graph.setdefault(s, set()).add(d)
59
+
60
+ queue = deque([[src_type]])
61
+ visited = {src_type}
62
+
63
+ while queue:
64
+ path = queue.popleft()
65
+ current = path[-1]
66
+ for neighbor in graph.get(current, []):
67
+ if neighbor in visited:
68
+ continue
69
+ new_path = path + [neighbor]
70
+ if neighbor == dst_type:
71
+ return new_path
72
+ queue.append(new_path)
73
+ visited.add(neighbor)
74
+ return None
@@ -0,0 +1 @@
1
+ from .workflow_type import workflow_type_register
@@ -0,0 +1,13 @@
1
+ from __future__ import annotations
2
+
3
class Context():
    """Mutable bag of workflow variables passed between nodes."""

    def __init__(
        self,
        variables: dict | None = None,
    ) -> None:
        # Bug fix: the previous default `variables = dict()` was a mutable
        # default argument, so every Context() created without arguments
        # shared one dict. Build a fresh dict per instance instead.
        self.variables = variables if variables is not None else {}

    def _clone(self) -> Context:
        """Return a copy whose variable dict is independent of this one (shallow)."""
        return Context(
            variables=dict(self.variables),
        )
@@ -0,0 +1,31 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING
4
+
5
+ if TYPE_CHECKING:
6
+ from .node import Node
7
+ from .port import Port
8
+
9
class Edge:
    """A directed connection from one node's output port to another node's input port."""

    def __init__(
        self,
        start_node: Node,
        end_node: Node,
        start_port: Port,
        end_port: Port,
    ) -> None:
        self.start_node = start_node
        self.end_node = end_node
        self.start_port = start_port
        self.end_port = end_port

    def _simple_clone(self, node_map: dict[Node, Node], port_map: dict[Port, Port]) -> Edge:
        """Return a copy whose endpoints are remapped through node_map/port_map.

        A None endpoint node stays None instead of being looked up, which
        would otherwise raise a KeyError.
        """
        def remap(node):
            return node_map[node] if node is not None else None

        return Edge(
            start_node=remap(self.start_node),
            end_node=remap(self.end_node),
            start_port=port_map[self.start_port],
            end_port=port_map[self.end_port],
        )