service-forge 0.1.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of service-forge might be problematic. Click here for more details.

Files changed (83)
  1. service_forge/api/deprecated_websocket_api.py +86 -0
  2. service_forge/api/deprecated_websocket_manager.py +425 -0
  3. service_forge/api/http_api.py +152 -0
  4. service_forge/api/http_api_doc.py +455 -0
  5. service_forge/api/kafka_api.py +126 -0
  6. service_forge/api/routers/feedback/feedback_router.py +148 -0
  7. service_forge/api/routers/service/service_router.py +127 -0
  8. service_forge/api/routers/websocket/websocket_manager.py +83 -0
  9. service_forge/api/routers/websocket/websocket_router.py +78 -0
  10. service_forge/api/task_manager.py +141 -0
  11. service_forge/current_service.py +14 -0
  12. service_forge/db/__init__.py +1 -0
  13. service_forge/db/database.py +237 -0
  14. service_forge/db/migrations/feedback_migration.py +154 -0
  15. service_forge/db/models/__init__.py +0 -0
  16. service_forge/db/models/feedback.py +33 -0
  17. service_forge/llm/__init__.py +67 -0
  18. service_forge/llm/llm.py +56 -0
  19. service_forge/model/__init__.py +0 -0
  20. service_forge/model/feedback.py +30 -0
  21. service_forge/model/websocket.py +13 -0
  22. service_forge/proto/foo_input.py +5 -0
  23. service_forge/service.py +280 -0
  24. service_forge/service_config.py +44 -0
  25. service_forge/sft/cli.py +91 -0
  26. service_forge/sft/cmd/config_command.py +67 -0
  27. service_forge/sft/cmd/deploy_service.py +123 -0
  28. service_forge/sft/cmd/list_tars.py +41 -0
  29. service_forge/sft/cmd/service_command.py +149 -0
  30. service_forge/sft/cmd/upload_service.py +36 -0
  31. service_forge/sft/config/injector.py +129 -0
  32. service_forge/sft/config/injector_default_files.py +131 -0
  33. service_forge/sft/config/sf_metadata.py +30 -0
  34. service_forge/sft/config/sft_config.py +200 -0
  35. service_forge/sft/file/__init__.py +0 -0
  36. service_forge/sft/file/ignore_pattern.py +80 -0
  37. service_forge/sft/file/sft_file_manager.py +107 -0
  38. service_forge/sft/kubernetes/kubernetes_manager.py +257 -0
  39. service_forge/sft/util/assert_util.py +25 -0
  40. service_forge/sft/util/logger.py +16 -0
  41. service_forge/sft/util/name_util.py +8 -0
  42. service_forge/sft/util/yaml_utils.py +57 -0
  43. service_forge/storage/__init__.py +5 -0
  44. service_forge/storage/feedback_storage.py +245 -0
  45. service_forge/utils/__init__.py +0 -0
  46. service_forge/utils/default_type_converter.py +12 -0
  47. service_forge/utils/register.py +39 -0
  48. service_forge/utils/type_converter.py +99 -0
  49. service_forge/utils/workflow_clone.py +124 -0
  50. service_forge/workflow/__init__.py +1 -0
  51. service_forge/workflow/context.py +14 -0
  52. service_forge/workflow/edge.py +24 -0
  53. service_forge/workflow/node.py +184 -0
  54. service_forge/workflow/nodes/__init__.py +8 -0
  55. service_forge/workflow/nodes/control/if_node.py +29 -0
  56. service_forge/workflow/nodes/control/switch_node.py +28 -0
  57. service_forge/workflow/nodes/input/console_input_node.py +26 -0
  58. service_forge/workflow/nodes/llm/query_llm_node.py +41 -0
  59. service_forge/workflow/nodes/nested/workflow_node.py +28 -0
  60. service_forge/workflow/nodes/output/kafka_output_node.py +27 -0
  61. service_forge/workflow/nodes/output/print_node.py +29 -0
  62. service_forge/workflow/nodes/test/if_console_input_node.py +33 -0
  63. service_forge/workflow/nodes/test/time_consuming_node.py +62 -0
  64. service_forge/workflow/port.py +89 -0
  65. service_forge/workflow/trigger.py +28 -0
  66. service_forge/workflow/triggers/__init__.py +6 -0
  67. service_forge/workflow/triggers/a2a_api_trigger.py +257 -0
  68. service_forge/workflow/triggers/fast_api_trigger.py +201 -0
  69. service_forge/workflow/triggers/kafka_api_trigger.py +47 -0
  70. service_forge/workflow/triggers/once_trigger.py +23 -0
  71. service_forge/workflow/triggers/period_trigger.py +29 -0
  72. service_forge/workflow/triggers/websocket_api_trigger.py +189 -0
  73. service_forge/workflow/workflow.py +227 -0
  74. service_forge/workflow/workflow_callback.py +141 -0
  75. service_forge/workflow/workflow_config.py +66 -0
  76. service_forge/workflow/workflow_event.py +15 -0
  77. service_forge/workflow/workflow_factory.py +246 -0
  78. service_forge/workflow/workflow_group.py +51 -0
  79. service_forge/workflow/workflow_type.py +52 -0
  80. service_forge-0.1.18.dist-info/METADATA +98 -0
  81. service_forge-0.1.18.dist-info/RECORD +83 -0
  82. service_forge-0.1.18.dist-info/WHEEL +4 -0
  83. service_forge-0.1.18.dist-info/entry_points.txt +2 -0
@@ -0,0 +1,455 @@
1
+ import asyncio
2
+ from typing import TYPE_CHECKING, Any, Union
3
+ from service_forge.api.http_api import fastapi_app
4
+ from fastapi.openapi.utils import get_openapi
5
+ from pydantic import BaseModel
6
+ from google.protobuf.message import Message
7
+ from google.protobuf import descriptor as _descriptor
8
+ from a2a.utils.constants import AGENT_CARD_WELL_KNOWN_PATH, EXTENDED_AGENT_CARD_PATH
9
+
10
+ if TYPE_CHECKING:
11
+ from service_forge.service import Service
12
+ from service_forge.workflow.workflow import Workflow
13
+ from service_forge.workflow.workflow_group import WorkflowGroup
14
+
15
def _protobuf_type_to_json_schema_type(field_type: int) -> dict[str, Any]:
    """Map a protobuf scalar field type to a JSON-schema type descriptor.

    Types without an entry (enums, groups, messages) fall back to a plain
    ``{"type": "string"}`` schema.
    """
    fd = _descriptor.FieldDescriptor
    mapping = {
        # Floating-point scalars.
        fd.TYPE_DOUBLE: {"type": "number", "format": "double"},
        fd.TYPE_FLOAT: {"type": "number", "format": "float"},
        # 64-bit integers (fixed-width and zigzag variants share int64).
        fd.TYPE_INT64: {"type": "integer", "format": "int64"},
        fd.TYPE_SINT64: {"type": "integer", "format": "int64"},
        fd.TYPE_FIXED64: {"type": "integer", "format": "int64"},
        fd.TYPE_SFIXED64: {"type": "integer", "format": "int64"},
        fd.TYPE_UINT64: {"type": "integer", "format": "uint64"},
        # 32-bit integers.
        fd.TYPE_INT32: {"type": "integer", "format": "int32"},
        fd.TYPE_SINT32: {"type": "integer", "format": "int32"},
        fd.TYPE_FIXED32: {"type": "integer", "format": "int32"},
        fd.TYPE_SFIXED32: {"type": "integer", "format": "int32"},
        fd.TYPE_UINT32: {"type": "integer", "format": "uint32"},
        # Booleans, strings and raw bytes.
        fd.TYPE_BOOL: {"type": "boolean"},
        fd.TYPE_STRING: {"type": "string"},
        fd.TYPE_BYTES: {"type": "string", "format": "byte"},
    }
    return mapping.get(field_type, {"type": "string"})
35
+
36
+
37
+ def _get_protobuf_message_class_from_descriptor(message_descriptor: _descriptor.Descriptor) -> type[Message] | None:
38
+ """从 Descriptor 获取对应的 Python Message 类"""
39
+ try:
40
+ from google.protobuf import symbol_database
41
+ _sym_db = symbol_database.Default()
42
+ return _sym_db.GetPrototype(message_descriptor)
43
+ except Exception:
44
+ # 如果无法通过 symbol_database 获取,尝试通过模块查找
45
+ try:
46
+ # 尝试从包含的文件描述符中获取
47
+ file_desc = message_descriptor.file
48
+ package = file_desc.package
49
+ message_name = message_descriptor.name
50
+
51
+ # 尝试导入模块(这需要知道模块路径)
52
+ # 这里我们返回 None,让调用者处理
53
+ return None
54
+ except Exception:
55
+ return None
56
+
57
+
58
def _protobuf_message_to_json_schema(message_class: type[Message], openapi_schema: dict[str, Any], visited: set[str] | None = None) -> dict[str, Any]:
    """Convert a protobuf Message class into a JSON-schema object.

    Nested message types are registered under ``openapi_schema``'s
    components/schemas (via _get_protobuf_schema_ref) and referenced by $ref.
    *visited* tracks message names on the current recursion path to break
    cycles between mutually-referencing messages.
    """
    if visited is None:
        visited = set()

    descriptor = message_class.DESCRIPTOR
    model_name = descriptor.name

    # Guard against circular references: if we are already expanding this
    # message higher up the stack, emit a $ref instead of recursing forever.
    if model_name in visited:
        return {"$ref": f"#/components/schemas/{model_name}"}

    visited.add(model_name)

    # NOTE(review): "required" is emitted but never populated — proto3 fields
    # are all optional, so this is intentional-looking but worth confirming.
    schema: dict[str, Any] = {
        "type": "object",
        "properties": {},
        "required": []
    }

    for field in descriptor.fields:
        field_name = field.name
        field_schema: dict[str, Any] = {}

        if field.label == field.LABEL_REPEATED:
            # Repeated field -> JSON array.
            if field.type == field.TYPE_MESSAGE:
                # Array of nested messages.
                nested_message_class = _get_protobuf_message_class_from_descriptor(field.message_type)
                if nested_message_class:
                    # Ensure the nested message's schema is registered too.
                    _get_protobuf_schema_ref(nested_message_class, openapi_schema, visited)
                    nested_schema_ref = f"#/components/schemas/{field.message_type.name}"
                    field_schema = {
                        "type": "array",
                        "items": {"$ref": nested_schema_ref}
                    }
                else:
                    # Class could not be resolved — fall back to a generic object.
                    field_schema = {
                        "type": "array",
                        "items": {"type": "object"}
                    }
            else:
                # Array of scalar values.
                item_schema = _protobuf_type_to_json_schema_type(field.type)
                field_schema = {
                    "type": "array",
                    "items": item_schema
                }
        elif field.type == field.TYPE_MESSAGE:
            # Singular nested message.
            nested_message_class = _get_protobuf_message_class_from_descriptor(field.message_type)
            if nested_message_class:
                # Ensure the nested message's schema is registered too.
                _get_protobuf_schema_ref(nested_message_class, openapi_schema, visited)
                nested_schema_ref = f"#/components/schemas/{field.message_type.name}"
                field_schema = {"$ref": nested_schema_ref}
            else:
                # Class could not be resolved — fall back to a generic object.
                field_schema = {"type": "object"}
        else:
            # Scalar field.
            field_schema = _protobuf_type_to_json_schema_type(field.type)

        schema["properties"][field_name] = field_schema

    visited.remove(model_name)
    return schema
127
+
128
+
129
+ def _get_protobuf_schema_ref(message_class: type[Message], openapi_schema: dict[str, Any], visited: set[str] | None = None) -> str:
130
+ """获取 protobuf Message 的 schema 引用"""
131
+ model_name = message_class.DESCRIPTOR.name
132
+
133
+ if "components" not in openapi_schema:
134
+ openapi_schema["components"] = {}
135
+ if "schemas" not in openapi_schema["components"]:
136
+ openapi_schema["components"]["schemas"] = {}
137
+
138
+ if model_name not in openapi_schema["components"]["schemas"]:
139
+ json_schema = _protobuf_message_to_json_schema(message_class, openapi_schema, visited)
140
+ openapi_schema["components"]["schemas"][model_name] = json_schema
141
+
142
+ return f"#/components/schemas/{model_name}"
143
+
144
+
145
+ def _process_pydantic_schema_with_defs(json_schema: dict[str, Any], openapi_schema: dict[str, Any], visited: set[str] | None = None) -> dict[str, Any]:
146
+ """处理 Pydantic 生成的 JSON schema,将 $defs 中的嵌套模型迁移到 components/schemas"""
147
+ if visited is None:
148
+ visited = set()
149
+
150
+ # 确保 components/schemas 存在
151
+ if "components" not in openapi_schema:
152
+ openapi_schema["components"] = {}
153
+ if "schemas" not in openapi_schema["components"]:
154
+ openapi_schema["components"]["schemas"] = {}
155
+
156
+ # 处理 $defs 中的嵌套模型定义
157
+ if "$defs" in json_schema:
158
+ defs = json_schema.pop("$defs")
159
+ for def_name, def_schema in defs.items():
160
+ if def_name not in visited:
161
+ visited.add(def_name)
162
+ # 递归处理嵌套的 $defs
163
+ processed_def_schema = _process_pydantic_schema_with_defs(def_schema.copy(), openapi_schema, visited)
164
+ openapi_schema["components"]["schemas"][def_name] = processed_def_schema
165
+ visited.remove(def_name)
166
+
167
+ # 更新 schema 中的 $ref 引用,从 #/$defs/ModelName 改为 #/components/schemas/ModelName
168
+ def update_refs(obj: Any) -> Any:
169
+ if isinstance(obj, dict):
170
+ if "$ref" in obj:
171
+ ref = obj["$ref"]
172
+ if ref.startswith("#/$defs/"):
173
+ obj["$ref"] = ref.replace("#/$defs/", "#/components/schemas/")
174
+ else:
175
+ for key, value in obj.items():
176
+ obj[key] = update_refs(value)
177
+ elif isinstance(obj, list):
178
+ return [update_refs(item) for item in obj]
179
+ return obj
180
+
181
+ return update_refs(json_schema)
182
+
183
+
184
def _get_model_schema_ref(model: Union[type[BaseModel], type[Message]], openapi_schema: dict[str, Any], visited: set[str] | None = None) -> str:
    """Return a components/schemas $ref for *model*, registering it on first use.

    Supports Pydantic BaseModel subclasses and protobuf Message subclasses.

    Raises:
        ValueError: if *model* is neither a BaseModel nor a Message subclass.
    """
    if issubclass(model, BaseModel):
        model_name = model.__name__
        schemas = openapi_schema.setdefault("components", {}).setdefault("schemas", {})

        if model_name not in schemas:
            json_schema = model.model_json_schema()
            # Hoist nested $defs into components/schemas and rewrite refs.
            schemas[model_name] = _process_pydantic_schema_with_defs(json_schema, openapi_schema, visited)

        return f"#/components/schemas/{model_name}"
    elif issubclass(model, Message):
        return _get_protobuf_schema_ref(model, openapi_schema, visited)
    else:
        # FIX: *model* is itself a class, so report the class directly —
        # type(model) only printed the metaclass and hid the offending type.
        raise ValueError(f"Unsupported model type: {model}")
205
+
206
+
207
def _convert_model_to_parameters(model: Union[type[BaseModel], type[Message]], openapi_schema: dict[str, Any]) -> list[dict[str, Any]]:
    """Convert *model*'s top-level fields into OpenAPI query parameters.

    Supports Pydantic BaseModel and protobuf Message. Nested-message protobuf
    fields (and arrays of them) are skipped — they cannot be expressed as
    query parameters.
    """
    parameters: list[dict[str, Any]] = []

    if issubclass(model, BaseModel):
        # Registering the model hoists every nested definition into
        # components/schemas, so the processed schema is guaranteed to be
        # there afterwards — the old "not processed yet" fallback branch was
        # dead code and has been removed.
        _get_model_schema_ref(model, openapi_schema)
        processed_schema = openapi_schema["components"]["schemas"][model.__name__]

        required = processed_schema.get("required", [])
        for field_name, field_info in processed_schema.get("properties", {}).items():
            parameters.append({
                "name": field_name,
                "in": "query",
                "required": field_name in required,
                "schema": field_info,
            })
    elif issubclass(model, Message):
        for field in model.DESCRIPTOR.fields:
            field_schema = _protobuf_type_to_json_schema_type(field.type)

            if field.label == field.LABEL_REPEATED:
                if field.type == field.TYPE_MESSAGE:
                    # Arrays of nested messages are not representable in query params.
                    continue
                # Arrays of scalars.
                field_schema = {
                    "type": "array",
                    "items": field_schema,
                }
            elif field.type == field.TYPE_MESSAGE:
                # Nested messages are not representable in query params.
                continue

            parameters.append({
                "name": field.name,
                "in": "query",
                "required": False,  # proto3 fields are all optional by default
                "schema": field_schema,
            })

    return parameters
266
+
267
+
268
def _convert_model_to_request_body(model: Union[type[BaseModel], type[Message]], openapi_schema: dict[str, Any]) -> dict[str, Any]:
    """Build an OpenAPI requestBody object referencing *model*'s schema.

    Works for both Pydantic BaseModel and protobuf Message classes; the model
    itself is registered under components/schemas as a side effect.
    """
    ref = _get_model_schema_ref(model, openapi_schema)
    json_content = {"schema": {"$ref": ref}}
    return {"content": {"application/json": json_content}}
278
+
279
+
280
def _convert_model_to_response_schema(model: Union[type[BaseModel], type[Message]], openapi_schema: dict[str, Any]) -> dict[str, Any]:
    """Build an OpenAPI 200-response object referencing *model*'s schema.

    Works for both Pydantic BaseModel and protobuf Message classes; the model
    itself is registered under components/schemas as a side effect.
    """
    ref = _get_model_schema_ref(model, openapi_schema)
    json_content = {"schema": {"$ref": ref}}
    return {
        "description": "Success",
        "content": {"application/json": json_content},
    }
291
+
292
+
293
async def generate_service_http_api_doc(service: 'Service') -> None:
    """Build and install a custom OpenAPI schema for *service*.

    Starts from FastAPI's auto-generated schema, then adds one operation per
    FastAPITrigger (path/method/data_type read from the trigger's input ports)
    and the well-known A2A endpoints for each A2AAPITrigger. The result is
    assigned to ``fastapi_app.openapi_schema`` so FastAPI serves it verbatim.
    """
    # NOTE(review): fixed sleep presumably waits for routes/triggers to finish
    # registering — fragile; confirm whether a readiness signal exists.
    await asyncio.sleep(1)
    openapi_schema = get_openapi(
        title=service.name,
        version=service.version,
        description=service.description,
        routes=fastapi_app.routes,
    )


    # Imported here (not at module top) — presumably to avoid an import cycle
    # with the trigger modules; TODO confirm.
    from service_forge.workflow.triggers.fast_api_trigger import FastAPITrigger
    from service_forge.workflow.triggers.a2a_api_trigger import A2AAPITrigger

    for workflow_group in service.workflow_groups:
        main_workflow = workflow_group.get_main_workflow()
        fastapi_triggers = [node for node in main_workflow.nodes if isinstance(node, FastAPITrigger)]
        a2a_triggers = [node for node in main_workflow.nodes if isinstance(node, A2AAPITrigger)]

        # TODO: multiple output ports — only the first output port's type is
        # documented as the response model.
        if main_workflow.output_ports:
            output_type = main_workflow.output_ports[0].port.type
        else:
            output_type = None

        for trigger in fastapi_triggers:
            # Path, HTTP method and request model are configured on the
            # trigger's input ports.
            path = trigger.get_input_port_by_name("path").value
            method = trigger.get_input_port_by_name("method").value
            data_type = trigger.get_input_port_by_name("data_type").value

            if "paths" not in openapi_schema:
                openapi_schema["paths"] = {}
            if path not in openapi_schema["paths"]:
                openapi_schema["paths"][path] = {}

            operation: dict[str, Any] = {
                "summary": main_workflow.name,
                "description": main_workflow.description,
            }

            method_lower = method.lower()
            if method_lower == "get":
                # GET: expose the model's fields as query parameters.
                if data_type and isinstance(data_type, type) and (issubclass(data_type, BaseModel) or issubclass(data_type, Message)):
                    operation["parameters"] = _convert_model_to_parameters(data_type, openapi_schema)
            else:
                # Other methods: expose the model as a JSON request body.
                if data_type and isinstance(data_type, type) and (issubclass(data_type, BaseModel) or issubclass(data_type, Message)):
                    operation["requestBody"] = _convert_model_to_request_body(data_type, openapi_schema)

            if output_type and isinstance(output_type, type) and (issubclass(output_type, BaseModel) or issubclass(output_type, Message)):
                operation["responses"] = {
                    "200": _convert_model_to_response_schema(output_type, openapi_schema)
                }
            else:
                # No documented output model — generic success response.
                operation["responses"] = {
                    "200": {
                        "description": "Success",
                    }
                }

            openapi_schema["paths"][path][method_lower] = operation

        # Handle A2A triggers
        for trigger in a2a_triggers:
            if not trigger.agent_card:
                continue

            agent_card = trigger.agent_card
            base_path = "/a2a"

            # Add agent card endpoint
            agent_card_path = base_path + AGENT_CARD_WELL_KNOWN_PATH
            if "paths" not in openapi_schema:
                openapi_schema["paths"] = {}
            if agent_card_path not in openapi_schema["paths"]:
                openapi_schema["paths"][agent_card_path] = {}

            openapi_schema["paths"][agent_card_path]["get"] = {
                "summary": f"Get {agent_card.name} Agent Card",
                "description": agent_card.description or f"Retrieve the agent card for {agent_card.name}",
                "tags": ["A2A Agent"],
                "responses": {
                    "200": {
                        "description": "Agent card JSON",
                        "content": {
                            "application/json": {
                                "schema": {
                                    "type": "object",
                                    "description": "A2A Agent Card specification"
                                }
                            }
                        }
                    }
                }
            }

            # Add JSON-RPC endpoint
            rpc_path = base_path + "/"
            if rpc_path not in openapi_schema["paths"]:
                openapi_schema["paths"][rpc_path] = {}

            openapi_schema["paths"][rpc_path]["post"] = {
                "summary": f"{agent_card.name} JSON-RPC Endpoint",
                "description": agent_card.description or f"JSON-RPC endpoint for {agent_card.name}",
                "tags": ["A2A Agent"],
                "requestBody": {
                    "required": True,
                    "content": {
                        "application/json": {
                            "schema": {
                                "type": "object",
                                "description": "JSON-RPC request",
                                "properties": {
                                    "jsonrpc": {"type": "string", "example": "2.0"},
                                    "method": {"type": "string"},
                                    "params": {"type": "object"},
                                    "id": {"type": ["string", "integer", "null"]}
                                },
                                "required": ["jsonrpc", "method"]
                            }
                        }
                    }
                },
                "responses": {
                    "200": {
                        "description": "JSON-RPC response",
                        "content": {
                            "application/json": {
                                "schema": {
                                    "type": "object",
                                    "description": "JSON-RPC response"
                                }
                            }
                        }
                    }
                }
            }

            # Add extended card endpoint if supported
            if agent_card.supports_authenticated_extended_card:
                extended_card_path = base_path + EXTENDED_AGENT_CARD_PATH
                if extended_card_path not in openapi_schema["paths"]:
                    openapi_schema["paths"][extended_card_path] = {}

                openapi_schema["paths"][extended_card_path]["get"] = {
                    "summary": f"Get {agent_card.name} Authenticated Extended Agent Card",
                    "description": f"Retrieve the authenticated extended agent card for {agent_card.name}",
                    "tags": ["A2A Agent"],
                    "security": [{"bearerAuth": []}],
                    "responses": {
                        "200": {
                            "description": "Extended agent card JSON",
                            "content": {
                                "application/json": {
                                    "schema": {
                                        "type": "object",
                                        "description": "A2A Extended Agent Card specification"
                                    }
                                }
                            }
                        }
                    }
                }

    fastapi_app.openapi_schema = openapi_schema
@@ -0,0 +1,126 @@
1
+ from __future__ import annotations
2
+ from typing import Callable, Any
3
+ from aiokafka import AIOKafkaConsumer, AIOKafkaProducer, ConsumerRecord
4
+ import asyncio
5
+ import json
6
+ import inspect
7
+ from pydantic import BaseModel
8
+ from loguru import logger
9
+
10
class KafkaApp:
    """Minimal Kafka facade: decorator-registered consumers plus one shared producer.

    Handlers are registered per topic with :meth:`kafka_input`; :meth:`start`
    launches one consumer task per topic and blocks until :meth:`stop` is
    called. Payloads are decoded as protobuf first, with a JSON fallback.
    """

    def __init__(self, bootstrap_servers: str | None = None):
        self.bootstrap_servers = bootstrap_servers
        # FIX: annotation was dict[str, Callable] but the dict stores
        # (handler, data_type, group_id) tuples.
        self._handlers: dict[str, tuple[Callable, type, str]] = {}
        self._consumer_tasks: dict[str, asyncio.Task] = {}
        self._producer: AIOKafkaProducer | None = None
        self._lock = asyncio.Lock()
        self._running = False

    def kafka_input(self, topic: str, data_type: type, group_id: str):
        """Decorator registering the wrapped function as *topic*'s handler.

        If the app is already running, a consumer for the topic is started
        immediately; otherwise consumers start in :meth:`start`.
        """
        def decorator(func: Callable):
            self._handlers[topic] = (func, data_type, group_id)
            logger.info(f"Registered Kafka input handler for topic '{topic}', data_type: {data_type}")

            if self._running:
                asyncio.create_task(self._start_consumer(topic, func, data_type, group_id))
            return func
        return decorator

    def set_bootstrap_servers(self, bootstrap_servers: str) -> None:
        """Set the Kafka bootstrap servers; must happen before start()."""
        self.bootstrap_servers = bootstrap_servers

    async def start(self):
        """Start the producer and all registered consumers, then block until stop().

        Raises:
            ValueError: if bootstrap_servers has not been set.
        """
        if not self.bootstrap_servers:
            raise ValueError("bootstrap_servers 未设置")

        logger.info(f"🚀 KafkaApp started with servers: {self.bootstrap_servers}")

        await self._start_producer()

        async with self._lock:
            for topic, (handler, data_type, group_id) in self._handlers.items():
                if topic not in self._consumer_tasks:
                    self._consumer_tasks[topic] = asyncio.create_task(self._start_consumer(topic, handler, data_type, group_id))

        self._running = True
        # Keep this coroutine alive until stop() flips the flag; consumers run
        # as separate tasks.
        while self._running:
            await asyncio.sleep(1)

    async def _start_consumer(self, topic: str, handler: Callable, data_type: type, group_id: str):
        """Consume *topic* forever, dispatching each record to *handler*."""
        consumer = AIOKafkaConsumer(
            topic,
            bootstrap_servers=self.bootstrap_servers,
            group_id=group_id,
            enable_auto_commit=True,
            auto_offset_reset="latest",
        )
        await consumer.start()
        logger.info(f"✅ Started consumer for topic: {topic}")

        try:
            async for msg in consumer:
                await self._dispatch_message(handler, msg, data_type)
        except asyncio.CancelledError:
            logger.warning(f"🛑 Consumer for {topic} cancelled")
        finally:
            # Always release the consumer's connections, even on cancellation.
            await consumer.stop()

    async def _dispatch_message(self, handler: Callable, msg: ConsumerRecord, data_type: type):
        """Decode *msg* (protobuf first, JSON fallback) and invoke *handler*.

        Awaits the handler's result when it returns a coroutine.
        """
        try:
            data = data_type()
            data.ParseFromString(msg.value)
        except Exception as e:
            # FIX: was a bare print(); route the failure through the module
            # logger like every other diagnostic, then fall back to JSON.
            logger.warning(f"Protobuf decode failed, falling back to JSON: {e}")
            data = data_type(**json.loads(msg.value.decode("utf-8")))
        result = handler(data)
        if inspect.iscoroutine(result):
            await result

    async def _start_producer(self):
        """Lazily create and start the shared protobuf-serializing producer."""
        if self._producer is None:
            self._producer = AIOKafkaProducer(
                bootstrap_servers=self.bootstrap_servers,
                value_serializer=lambda v: v.SerializeToString(),
            )
            await self._producer.start()
            logger.info("✅ Kafka producer started")

    async def _stop_producer(self):
        """Stop and discard the shared producer, if one was started."""
        if self._producer is not None:
            await self._producer.stop()
            self._producer = None
            logger.info("✅ Kafka producer stopped")

    async def send_message(self, topic: str, data_type: type, data: Any) -> None:
        """Send *data* (a protobuf message) to *topic* via the shared producer.

        Raises:
            RuntimeError: if the app or producer has not been started.
        """
        if not self._running:
            raise RuntimeError("KafkaApp is not running. Call start() first.")

        if self._producer is None:
            raise RuntimeError("Kafka producer is not initialized.")

        try:
            await self._producer.send_and_wait(topic, data)
            logger.info(f"✅ 已发送消息到 topic '{topic}', type: {data_type}")

        except Exception as e:
            logger.error(f"❌ 发送消息到 topic '{topic}' 失败: {e}")
            raise

    async def stop(self):
        """Cancel all consumer tasks, stop the producer and unblock start()."""
        logger.info("Stopping KafkaApp ...")
        self._running = False

        for t in list(self._consumer_tasks.values()):
            t.cancel()
        # Give cancelled tasks a moment to run their cleanup (consumer.stop()).
        await asyncio.sleep(0.1)
        self._consumer_tasks.clear()

        await self._stop_producer()

        logger.info("✅ KafkaApp stopped")
121
+
122
# Module-level singleton shared by the service runtime.
kafka_app = KafkaApp()


async def start_kafka_server(bootstrap_servers: str):
    """Point the shared KafkaApp at *bootstrap_servers* and run it until stopped."""
    app = kafka_app
    app.set_bootstrap_servers(bootstrap_servers)
    await app.start()