sycommon-python-lib 0.1.14__tar.gz → 0.1.16__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sycommon-python-lib might be problematic.

Files changed (58)
  1. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/PKG-INFO +5 -5
  2. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/pyproject.toml +5 -5
  3. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/logging/kafka_log.py +12 -5
  4. sycommon_python_lib-0.1.16/src/sycommon/middleware/cors.py +16 -0
  5. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/middleware/middleware.py +1 -1
  6. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/rabbitmq/rabbitmq_client.py +11 -3
  7. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/rabbitmq/rabbitmq_service.py +2 -2
  8. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/services.py +18 -17
  9. sycommon_python_lib-0.1.16/src/sycommon/sse/event.py +97 -0
  10. sycommon_python_lib-0.1.16/src/sycommon/sse/sse.py +278 -0
  11. sycommon_python_lib-0.1.16/src/sycommon/tools/__init__.py +0 -0
  12. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon_python_lib.egg-info/PKG-INFO +5 -5
  13. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon_python_lib.egg-info/SOURCES.txt +3 -0
  14. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon_python_lib.egg-info/requires.txt +4 -4
  15. sycommon_python_lib-0.1.14/src/sycommon/middleware/cors.py +0 -14
  16. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/README.md +0 -0
  17. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/setup.cfg +0 -0
  18. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/command/cli.py +0 -0
  19. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/__init__.py +0 -0
  20. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/config/Config.py +0 -0
  21. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/config/DatabaseConfig.py +0 -0
  22. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/config/EmbeddingConfig.py +0 -0
  23. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/config/LLMConfig.py +0 -0
  24. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/config/MQConfig.py +0 -0
  25. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/config/RerankerConfig.py +0 -0
  26. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/config/__init__.py +0 -0
  27. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/database/base_db_service.py +0 -0
  28. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/database/database_service.py +0 -0
  29. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/health/__init__.py +0 -0
  30. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/health/health_check.py +0 -0
  31. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/health/ping.py +0 -0
  32. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/logging/__init__.py +0 -0
  33. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/logging/logger_wrapper.py +0 -0
  34. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/middleware/__init__.py +0 -0
  35. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/middleware/context.py +0 -0
  36. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/middleware/docs.py +0 -0
  37. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/middleware/exception.py +0 -0
  38. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/middleware/monitor_memory.py +0 -0
  39. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/middleware/mq.py +0 -0
  40. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/middleware/timeout.py +0 -0
  41. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/middleware/traceid.py +0 -0
  42. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/models/__init__.py +0 -0
  43. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/models/base_http.py +0 -0
  44. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/models/log.py +0 -0
  45. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/models/mqlistener_config.py +0 -0
  46. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/models/mqmsg_model.py +0 -0
  47. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/models/mqsend_config.py +0 -0
  48. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/models/sso_user.py +0 -0
  49. {sycommon_python_lib-0.1.14/src/sycommon/synacos → sycommon_python_lib-0.1.16/src/sycommon/sse}/__init__.py +0 -0
  50. {sycommon_python_lib-0.1.14/src/sycommon/tools → sycommon_python_lib-0.1.16/src/sycommon/synacos}/__init__.py +0 -0
  51. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/synacos/feign.py +0 -0
  52. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/synacos/nacos_service.py +0 -0
  53. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/tools/docs.py +0 -0
  54. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/tools/snowflake.py +0 -0
  55. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/tools/timing.py +0 -0
  56. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon_python_lib.egg-info/dependency_links.txt +0 -0
  57. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon_python_lib.egg-info/entry_points.txt +0 -0
  58. {sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon_python_lib.egg-info/top_level.txt +0 -0

{sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/PKG-INFO
@@ -1,23 +1,23 @@
  Metadata-Version: 2.4
  Name: sycommon-python-lib
- Version: 0.1.14
+ Version: 0.1.16
  Summary: Add your description here
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  Requires-Dist: aio-pika>=9.5.7
  Requires-Dist: aiohttp>=3.12.15
  Requires-Dist: decorator>=5.2.1
- Requires-Dist: fastapi>=0.116.1
+ Requires-Dist: fastapi>=0.117.1
  Requires-Dist: kafka-python>=2.2.15
  Requires-Dist: loguru>=0.7.3
  Requires-Dist: mysql-connector-python>=9.4.0
  Requires-Dist: nacos-sdk-python>=2.0.9
- Requires-Dist: pydantic>=2.11.7
+ Requires-Dist: pydantic>=2.11.9
  Requires-Dist: python-dotenv>=1.1.1
- Requires-Dist: pyyaml>=6.0.2
+ Requires-Dist: pyyaml>=6.0.3
  Requires-Dist: sqlalchemy>=2.0.43
  Requires-Dist: uuid>=1.30
- Requires-Dist: uvicorn>=0.35.0
+ Requires-Dist: uvicorn>=0.37.0

  # sycommon-python-lib


{sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "sycommon-python-lib"
- version = "0.1.14"
+ version = "0.1.16"
  description = "Add your description here"
  readme = "README.md"
  requires-python = ">=3.10"
@@ -8,17 +8,17 @@ dependencies = [
      "aio-pika>=9.5.7",
      "aiohttp>=3.12.15",
      "decorator>=5.2.1",
-     "fastapi>=0.116.1",
+     "fastapi>=0.117.1",
      "kafka-python>=2.2.15",
      "loguru>=0.7.3",
      "mysql-connector-python>=9.4.0",
      "nacos-sdk-python>=2.0.9",
-     "pydantic>=2.11.7",
+     "pydantic>=2.11.9",
      "python-dotenv>=1.1.1",
-     "pyyaml>=6.0.2",
+     "pyyaml>=6.0.3",
      "sqlalchemy>=2.0.43",
      "uuid>=1.30",
-     "uvicorn>=0.35.0",
+     "uvicorn>=0.37.0",
  ]

  [tool.setuptools]

{sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/logging/kafka_log.py
@@ -1,4 +1,5 @@
  import os
+ import pprint
  import sys
  import traceback
  import asyncio
@@ -70,6 +71,8 @@ class KafkaLogger(metaclass=SingletonMeta):
              buffer_memory=67108864,  # enlarge the buffer memory
              connections_max_idle_ms=540000,  # maximum connection idle time
              reconnect_backoff_max_ms=10000,  # raise the maximum reconnect backoff
+             max_in_flight_requests_per_connection=1,  # limit unacknowledged requests per connection
+             # enable_idempotence=True,  # enable idempotent delivery
          )

          # start the background send thread
@@ -487,22 +490,26 @@ class SYLogger:
          logger.info(_log)

          if os.getenv('DEV-LOG', 'false').lower() == 'true':
-             print(_log)
+             pprint(_log)

      @staticmethod
-     def info(msg: any):
+     def info(msg: any, *args, **kwargs):
          SYLogger._log(msg, "INFO")

      @staticmethod
-     def warning(msg: any):
+     def warning(msg: any, *args, **kwargs):
          SYLogger._log(msg, "WARNING")

      @staticmethod
-     def error(msg: any):
+     def debug(msg: any, *args, **kwargs):
+         SYLogger._log(msg, "DEBUG")
+
+     @staticmethod
+     def error(msg: any, *args, **kwargs):
          SYLogger._log(msg, "ERROR")

      @staticmethod
-     def exception(msg: any):
+     def exception(msg: any, *args, **kwargs):
          """Log exception information, including the full stack trace."""
          trace_id = SYLogger.get_trace_id()

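Reviewer note on the producer change above: in kafka-python, capping max_in_flight_requests_per_connection at 1 trades throughput for ordering, since a batch that fails and is retried can no longer overtake a batch sent after it. A minimal, self-contained sketch of a producer configured along these lines follows; the broker address, topic name, acks/retries values and JSON serializer are illustrative assumptions, not taken from KafkaLogger:

import json

from kafka import KafkaProducer

# Sketch only: reliability-oriented settings mirroring the diff, plus assumed extras.
producer = KafkaProducer(
    bootstrap_servers=["localhost:9092"],          # assumed broker address
    value_serializer=lambda v: json.dumps(v).encode("utf-8"),
    acks="all",                                    # assumed: wait for the full ISR to acknowledge
    retries=5,                                     # assumed retry budget
    max_in_flight_requests_per_connection=1,       # keep retried batches in order
    buffer_memory=67108864,
    connections_max_idle_ms=540000,
    reconnect_backoff_max_ms=10000,
)

producer.send("service-logs", {"level": "INFO", "msg": "hello"})  # topic name is illustrative
producer.flush()
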

sycommon_python_lib-0.1.16/src/sycommon/middleware/cors.py
@@ -0,0 +1,16 @@
+ from fastapi.middleware.cors import CORSMiddleware
+
+
+ def setup_cors_handler(app):
+     # Allow access from any origin (*)
+     # Note: allow_credentials must be False in that case, otherwise the browser blocks the response
+     app.add_middleware(
+         CORSMiddleware,
+         # allow_origins=["*"],  # allow all origins
+         # allow_credentials=False,  # must be False (when combined with *)
+         # allow_methods=["*"],  # allow all HTTP methods
+         allow_headers=["*"],  # allow all request headers
+         expose_headers=["*"]  # let the frontend read all response headers
+     )
+
+     return app
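
A note for readers of the new helper: allow_origins stays commented out, and Starlette's CORSMiddleware defaults to an empty origin list, so as committed the middleware only sets allow_headers and expose_headers and still permits no cross-origin requests. A minimal sketch of the equivalent direct setup with an explicit origin list; the origin value and the app object here are illustrative, not part of sycommon:

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["https://example.com"],  # assumed frontend origin; use ["*"] only with credentials disabled
    allow_credentials=False,                # must stay False when allow_origins is ["*"]
    allow_methods=["*"],
    allow_headers=["*"],
    expose_headers=["*"],
)
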

{sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/middleware/middleware.py
@@ -26,7 +26,7 @@ class Middleware:
          # app = setup_monitor_memory_middleware(app)

          # set up CORS
-         # app = setup_cors_handler(app)
+         app = setup_cors_handler(app)

          # health check
          app = setup_health_handler(app)

{sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/rabbitmq/rabbitmq_client.py
@@ -13,12 +13,13 @@ from aio_pika.abc import (
  )
  from aiormq.exceptions import ChannelInvalidStateError, ConnectionClosed

+ from sycommon.logging.kafka_log import SYLogger
  from sycommon.models.mqmsg_model import MQMsgModel

  # maximum retry count
  MAX_RETRY_COUNT = 3

- logger = logging.getLogger(__name__)
+ logger = SYLogger


  class RabbitMQClient:
@@ -324,6 +325,9 @@ class RabbitMQClient:
              timeout=self.rpc_timeout
          )

+         # enable publisher confirms
+         # await self.channel.confirm_delivery()
+
          # set the prefetch count to control fair message dispatch
          await self.channel.set_qos(prefetch_count=self.prefetch_count)
@@ -667,7 +671,9 @@ class RabbitMQClient:
          try:
              await self.exchange.publish(
                  message,
-                 routing_key=routing_key or self.routing_key or '#'
+                 routing_key=routing_key or self.routing_key or '#',
+                 mandatory=True,
+                 timeout=5.0
              )
              self._update_activity_timestamp()
              logger.debug(
@@ -875,7 +881,9 @@ class RabbitMQClient:
          if self.exchange:
              await self.exchange.publish(
                  new_message,
-                 routing_key=self.routing_key or '#'
+                 routing_key=self.routing_key or '#',
+                 mandatory=True,
+                 timeout=5.0
              )
              self._update_activity_timestamp()
              logger.info(
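
On the publish changes above: aio-pika's Exchange.publish accepts mandatory and timeout, so unroutable messages are returned by the broker instead of being silently dropped, and a stalled broker can no longer block the publisher indefinitely. A standalone sketch of the same pattern, with an assumed connection URL, exchange name and payload:

import asyncio

import aio_pika

async def publish_once() -> None:
    # Assumed URL and exchange; only the publish arguments mirror the diff.
    connection = await aio_pika.connect_robust("amqp://guest:guest@localhost/")
    async with connection:
        # publisher_confirms=True (aio-pika's default) makes publish() wait for the broker ack,
        # which is what the timeout below bounds.
        channel = await connection.channel(publisher_confirms=True)
        exchange = await channel.declare_exchange(
            "demo.topic", aio_pika.ExchangeType.TOPIC, durable=True
        )
        await exchange.publish(
            aio_pika.Message(body=b'{"hello": "world"}'),
            routing_key="#",
            mandatory=True,   # broker returns the message if no queue matches
            timeout=5.0,      # stop waiting for the confirmation after 5 s
        )

asyncio.run(publish_once())
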

{sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/rabbitmq/rabbitmq_service.py
@@ -13,7 +13,7 @@ from sycommon.models.sso_user import SsoUser
  from sycommon.logging.kafka_log import SYLogger
  from .rabbitmq_client import RabbitMQClient

- logger = logging.getLogger(__name__)
+ logger = SYLogger


  class RabbitMQService:
@@ -100,7 +100,7 @@ class RabbitMQService:
              f"creation allowed: {create_if_not_exists}"
          )

-         # key change: split the comma-separated host string into a list of cluster nodes
+         # split the comma-separated host string into a list of cluster nodes
          hosts_str = mq_config.get('host', "")
          hosts_list = [host.strip()
                        for host in hosts_str.split(',') if host.strip()]

{sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon/services.py
@@ -19,6 +19,7 @@ class Services(metaclass=SingletonMeta):
      _mq_tasks: List[asyncio.Task] = []
      _instance: Optional['Services'] = None
      _app: Optional[FastAPI] = None
+     _user_lifespan: Optional[Callable] = None

      def __init__(self, config: dict, app: FastAPI):
          if not Services._config:
@@ -54,6 +55,7 @@ class Services(metaclass=SingletonMeta):
          # save the application instance and configuration
          cls._app = app
          cls._config = config
+         cls._user_lifespan = app.router.lifespan_context
          # set up the docs
          applications.get_swagger_ui_html = custom_swagger_ui_html
          applications.get_redoc_html = custom_redoc_html
@@ -71,20 +73,15 @@ class Services(metaclass=SingletonMeta):
          if database_service:
              cls._setup_database_static(database_service, config)

-         # create the lifespan manager
+         # create the combined lifespan manager
          @asynccontextmanager
-         async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
-             # startup phase - run initialization
+         async def combined_lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
+             # 1. run Services' own initialization
              instance = cls(config, app)
-
-             # check whether listeners exist
              has_listeners = bool(
                  rabbitmq_listeners and len(rabbitmq_listeners) > 0)
-             # check whether senders exist
-             has_senders = bool(
-                 rabbitmq_senders and len(rabbitmq_senders) > 0)
+             has_senders = bool(rabbitmq_senders and len(rabbitmq_senders) > 0)

-             # run the async MQ initialization, passing whether listeners exist
              try:
                  await instance._setup_mq_async(
                      rabbitmq_listeners=rabbitmq_listeners,
@@ -93,22 +90,26 @@
                      has_senders=has_senders
                  )
                  cls._initialized = True
-                 logging.info("All services initialized")
+                 logging.info("Services initialized")
              except Exception as e:
-                 logging.error(f"Service initialization failed: {str(e)}", exc_info=True)
+                 logging.error(f"Services initialization failed: {str(e)}", exc_info=True)
                  raise

-             # attach the instance to the app
              app.state.services = instance

-             yield  # application running phase
+             # 2. run the user-defined lifespan
+             if cls._user_lifespan:
+                 async with cls._user_lifespan(app):
+                     yield  # application running phase
+             else:
+                 yield  # no user lifespan, just yield

-             # shutdown phase - clean up resources
+             # 3. run Services' shutdown logic
              await cls.shutdown()
-             logging.info("All services shut down")
+             logging.info("Services shut down")

-         # set the lifespan
-         app.router.lifespan_context = lifespan
+         # set the combined lifespan
+         app.router.lifespan_context = combined_lifespan

          return app

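The substance of this hunk is lifespan composition: whatever lifespan FastAPI already holds is captured into _user_lifespan and re-entered inside the library's own lifespan, so user startup/shutdown hooks keep running instead of being overwritten. A minimal sketch of the same pattern outside the library; every name below is illustrative, not a sycommon API:

from contextlib import asynccontextmanager
from typing import AsyncGenerator

from fastapi import FastAPI

@asynccontextmanager
async def user_lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
    print("user startup")        # e.g. open a connection pool
    yield
    print("user shutdown")       # e.g. close the pool

app = FastAPI(lifespan=user_lifespan)

# Capture the lifespan the app already has...
saved_lifespan = app.router.lifespan_context

@asynccontextmanager
async def combined_lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
    print("library startup")          # stands in for Services' own initialization
    async with saved_lifespan(app):   # ...and nest it so the user hooks still run
        yield                         # the application serves requests here
    print("library shutdown")         # stands in for Services.shutdown()

# ...then install the wrapper in its place.
app.router.lifespan_context = combined_lifespan
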

sycommon_python_lib-0.1.16/src/sycommon/sse/event.py
@@ -0,0 +1,97 @@
+ import io
+ import re
+ import json
+ from typing import Optional, Any, Union
+
+
+ class ServerSentEvent:
+     """
+     Helper class to format data for Server-Sent Events (SSE).
+     """
+
+     _LINE_SEP_EXPR = re.compile(r"\r\n|\r|\n")
+     DEFAULT_SEPARATOR = "\r\n"
+
+     def __init__(
+         self,
+         data: Optional[Any] = None,
+         *,
+         event: Optional[str] = None,
+         id: Optional[str] = None,
+         retry: Optional[int] = None,
+         comment: Optional[str] = None,
+         sep: Optional[str] = None,
+     ) -> None:
+         self.data = data
+         self.event = event
+         self.id = id
+         self.retry = retry
+         self.comment = comment
+         self._sep = sep if sep is not None else self.DEFAULT_SEPARATOR
+
+     def encode(self) -> bytes:
+         buffer = io.StringIO()
+         if self.comment is not None:
+             for chunk in self._LINE_SEP_EXPR.split(str(self.comment)):
+                 buffer.write(f": {chunk}{self._sep}")
+
+         if self.id is not None:
+             # Clean newlines in the event id
+             buffer.write(
+                 "id: " + self._LINE_SEP_EXPR.sub("", self.id) + self._sep)
+
+         if self.event is not None:
+             # Clean newlines in the event name
+             buffer.write(
+                 "event: " + self._LINE_SEP_EXPR.sub("", self.event) + self._sep
+             )
+
+         if self.data is not None:
+             # Break multi-line data into multiple data: lines
+             for chunk in self._LINE_SEP_EXPR.split(str(self.data)):
+                 buffer.write(f"data: {chunk}{self._sep}")
+
+         if self.retry is not None:
+             if not isinstance(self.retry, int):
+                 raise TypeError("retry argument must be int")
+             buffer.write(f"retry: {self.retry}{self._sep}")
+
+         buffer.write(self._sep)
+         return buffer.getvalue().encode("utf-8")
+
+
+ class JSONServerSentEvent(ServerSentEvent):
+     """
+     Helper class to format JSON data for Server-Sent Events (SSE).
+     """
+
+     def __init__(
+         self,
+         data: Optional[Any] = None,
+         *args,
+         **kwargs,
+     ) -> None:
+         super().__init__(
+             json.dumps(
+                 data,
+                 ensure_ascii=False,
+                 allow_nan=False,
+                 indent=None,
+                 separators=(",", ":"),
+             )
+             if data is not None
+             else None,
+             *args,
+             **kwargs,
+         )
+
+
+ def ensure_bytes(data: Union[bytes, dict, ServerSentEvent, Any], sep: str) -> bytes:
+     if isinstance(data, bytes):
+         return data
+     if isinstance(data, ServerSentEvent):
+         return data.encode()
+     if isinstance(data, dict):
+         data["sep"] = sep
+         return ServerSentEvent(**data).encode()
+     return ServerSentEvent(str(data), sep=sep).encode()
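
For orientation, this is the wire format the encoder above produces with the default \r\n separator; the event names and payloads are made up for illustration:

from sycommon.sse.event import ServerSentEvent, JSONServerSentEvent

evt = ServerSentEvent("first line\nsecond line", event="progress", id="42", retry=3000)
print(evt.encode().decode("utf-8"))
# id: 42
# event: progress
# data: first line          <- multi-line data becomes one "data:" field per line
# data: second line
# retry: 3000
#                           <- a blank line terminates the event

json_evt = JSONServerSentEvent({"step": 1, "total": 10}, event="progress")
print(json_evt.encode().decode("utf-8"))
# event: progress
# data: {"step":1,"total":10}
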

sycommon_python_lib-0.1.16/src/sycommon/sse/sse.py
@@ -0,0 +1,278 @@
+ import contextvars
+ import logging
+ from datetime import datetime, timezone
+ from typing import (
+     Any,
+     AsyncIterable,
+     Awaitable,
+     Callable,
+     Coroutine,
+     Iterator,
+     Mapping,
+     Optional,
+     Union,
+ )
+
+ import anyio
+ from starlette.background import BackgroundTask
+ from starlette.concurrency import iterate_in_threadpool
+ from starlette.datastructures import MutableHeaders
+ from starlette.responses import Response
+ from starlette.types import Receive, Scope, Send, Message
+
+ from sycommon.sse.event import ServerSentEvent, ensure_bytes
+
+
+ logger = logging.getLogger(__name__)
+
+ # Context variable for exit events per event loop
+ _exit_event_context: contextvars.ContextVar[Optional[anyio.Event]] = (
+     contextvars.ContextVar("exit_event", default=None)
+ )
+
+
+ class SendTimeoutError(TimeoutError):
+     pass
+
+
+ class AppStatus:
+     """Helper to capture a shutdown signal from Uvicorn so we can gracefully terminate SSE streams."""
+
+     should_exit = False
+     original_handler: Optional[Callable] = None
+
+     @staticmethod
+     def handle_exit(*args, **kwargs):
+         # Mark that the app should exit, and signal all waiters in all contexts.
+         AppStatus.should_exit = True
+
+         # Signal the event in current context if it exists
+         current_event = _exit_event_context.get(None)
+         if current_event is not None:
+             current_event.set()
+
+         if AppStatus.original_handler is not None:
+             AppStatus.original_handler(*args, **kwargs)
+
+     @staticmethod
+     def get_or_create_exit_event() -> anyio.Event:
+         """Get or create an exit event for the current context."""
+         event = _exit_event_context.get(None)
+         if event is None:
+             event = anyio.Event()
+             _exit_event_context.set(event)
+         return event
+
+
+ try:
+     from uvicorn.main import Server
+
+     AppStatus.original_handler = Server.handle_exit
+     Server.handle_exit = AppStatus.handle_exit  # type: ignore
+ except ImportError:
+     logger.debug(
+         "Uvicorn not installed. Graceful shutdown on server termination disabled."
+     )
+
+ Content = Union[str, bytes, dict, ServerSentEvent, Any]
+ SyncContentStream = Iterator[Content]
+ AsyncContentStream = AsyncIterable[Content]
+ ContentStream = Union[AsyncContentStream, SyncContentStream]
+
+
+ class EventSourceResponse(Response):
+     """
+     Streaming response that sends data conforming to the SSE (Server-Sent Events) specification.
+     """
+
+     DEFAULT_PING_INTERVAL = 15
+     DEFAULT_SEPARATOR = "\r\n"
+
+     def __init__(
+         self,
+         content: ContentStream,
+         status_code: int = 200,
+         headers: Optional[Mapping[str, str]] = None,
+         media_type: str = "text/event-stream",
+         background: Optional[BackgroundTask] = None,
+         ping: Optional[int] = None,
+         sep: Optional[str] = None,
+         ping_message_factory: Optional[Callable[[], ServerSentEvent]] = None,
+         data_sender_callable: Optional[
+             Callable[[], Coroutine[None, None, None]]
+         ] = None,
+         send_timeout: Optional[float] = None,
+         client_close_handler_callable: Optional[
+             Callable[[Message], Awaitable[None]]
+         ] = None,
+     ) -> None:
+         # Validate separator
+         if sep not in (None, "\r\n", "\r", "\n"):
+             raise ValueError(
+                 f"sep must be one of: \\r\\n, \\r, \\n, got: {sep}")
+         self.sep = sep or self.DEFAULT_SEPARATOR
+
+         # If content is sync, wrap it for async iteration
+         if isinstance(content, AsyncIterable):
+             self.body_iterator = content
+         else:
+             self.body_iterator = iterate_in_threadpool(content)
+
+         self.status_code = status_code
+         self.media_type = self.media_type if media_type is None else media_type
+         self.background = background
+         self.data_sender_callable = data_sender_callable
+         self.send_timeout = send_timeout
+
+         # Build SSE-specific headers.
+         _headers = MutableHeaders()
+         if headers is not None:  # pragma: no cover
+             _headers.update(headers)
+
+         # "The no-store response directive indicates that any caches of any kind (private or shared)
+         # should not store this response."
+         # -- https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control
+         # allow cache control header to be set by user to support fan out proxies
+         # https://www.fastly.com/blog/server-sent-events-fastly
+
+         _headers.setdefault("Cache-Control", "no-store")
+         # mandatory headers for server-sent events
+         _headers["Connection"] = "keep-alive"
+         _headers["X-Accel-Buffering"] = "no"
+         self.init_headers(_headers)
+
+         self.ping_interval = self.DEFAULT_PING_INTERVAL if ping is None else ping
+         self.ping_message_factory = ping_message_factory
+
+         self.client_close_handler_callable = client_close_handler_callable
+
+         self.active = True
+         # https://github.com/sysid/sse-starlette/pull/55#issuecomment-1732374113
+         self._send_lock = anyio.Lock()
+
+     @property
+     def ping_interval(self) -> Union[int, float]:
+         return self._ping_interval
+
+     @ping_interval.setter
+     def ping_interval(self, value: Union[int, float]) -> None:
+         if not isinstance(value, (int, float)):
+             raise TypeError("ping interval must be int")
+         if value < 0:
+             raise ValueError("ping interval must be greater than 0")
+         self._ping_interval = value
+
+     def enable_compression(self, force: bool = False) -> None:
+         raise NotImplementedError(
+             "Compression is not supported for SSE streams.")
+
+     async def _stream_response(self, send: Send) -> None:
+         """Send out SSE data to the client as it becomes available in the iterator."""
+         await send(
+             {
+                 "type": "http.response.start",
+                 "status": self.status_code,
+                 "headers": self.raw_headers,
+             }
+         )
+
+         async for data in self.body_iterator:
+             chunk = ensure_bytes(data, self.sep)
+             logger.debug("chunk: %s", chunk)
+             with anyio.move_on_after(self.send_timeout) as cancel_scope:
+                 await send(
+                     {"type": "http.response.body", "body": chunk, "more_body": True}
+                 )
+
+             if cancel_scope and cancel_scope.cancel_called:
+                 if hasattr(self.body_iterator, "aclose"):
+                     await self.body_iterator.aclose()
+                 raise SendTimeoutError()
+
+         async with self._send_lock:
+             self.active = False
+             await send({"type": "http.response.body", "body": b"", "more_body": False})
+
+     async def _listen_for_disconnect(self, receive: Receive) -> None:
+         """Watch for a disconnect message from the client."""
+         while self.active:
+             message = await receive()
+             if message["type"] == "http.disconnect":
+                 self.active = False
+                 logger.debug("Got event: http.disconnect. Stop streaming.")
+                 if self.client_close_handler_callable:
+                     await self.client_close_handler_callable(message)
+                 break
+
+     @staticmethod
+     async def _listen_for_exit_signal() -> None:
+         """Watch for shutdown signals (e.g. SIGINT, SIGTERM) so we can break the event loop."""
+         # Check if should_exit was set before anybody started waiting
+         if AppStatus.should_exit:
+             return
+
+         # Get or create context-local exit event
+         exit_event = AppStatus.get_or_create_exit_event()
+
+         # Check if should_exit got set while we set up the event
+         if AppStatus.should_exit:
+             return
+
+         await exit_event.wait()
+
+     async def _ping(self, send: Send) -> None:
+         """Periodically send ping messages to keep the connection alive on proxies.
+         - frequency: roughly every 15 seconds.
+         - Alternatively one can send periodically a comment line (one starting with a ':' character)
+         """
+         while self.active:
+             await anyio.sleep(self._ping_interval)
+             sse_ping = (
+                 self.ping_message_factory()
+                 if self.ping_message_factory
+                 else ServerSentEvent(
+                     comment=f"ping - {datetime.now(timezone.utc)}", sep=self.sep
+                 )
+             )
+             ping_bytes = ensure_bytes(sse_ping, self.sep)
+             logger.debug("ping: %s", ping_bytes)
+
+             async with self._send_lock:
+                 if self.active:
+                     await send(
+                         {
+                             "type": "http.response.body",
+                             "body": ping_bytes,
+                             "more_body": True,
+                         }
+                     )
+
+     async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
+         """Entrypoint for Starlette's ASGI contract. We spin up tasks:
+         - _stream_response to push events
+         - _ping to keep the connection alive
+         - _listen_for_exit_signal to respond to server shutdown
+         - _listen_for_disconnect to respond to client disconnect
+         """
+         async with anyio.create_task_group() as task_group:
+             # https://trio.readthedocs.io/en/latest/reference-core.html#custom-supervisors
+             async def cancel_on_finish(coro: Callable[[], Awaitable[None]]):
+                 await coro()
+                 task_group.cancel_scope.cancel()
+
+             task_group.start_soon(
+                 cancel_on_finish, lambda: self._stream_response(send))
+             task_group.start_soon(cancel_on_finish, lambda: self._ping(send))
+             task_group.start_soon(
+                 cancel_on_finish, self._listen_for_exit_signal)
+
+             if self.data_sender_callable:
+                 task_group.start_soon(self.data_sender_callable)
+
+             # Wait for the client to disconnect last
+             task_group.start_soon(
+                 cancel_on_finish, lambda: self._listen_for_disconnect(receive)
+             )
+
+         if self.background is not None:
+             await self.background()
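
A minimal sketch of how the new response class might be used from a FastAPI route; the route path, payload and timing are illustrative, and only the EventSourceResponse and JSONServerSentEvent parameters come from the code above:

import asyncio

from fastapi import FastAPI

from sycommon.sse.event import JSONServerSentEvent
from sycommon.sse.sse import EventSourceResponse

app = FastAPI()

@app.get("/stream")
async def stream():
    async def events():
        # Each yielded item passes through ensure_bytes(): bytes go out as-is,
        # ServerSentEvent objects are encoded, dicts become ServerSentEvent(**dict),
        # and anything else is stringified into a single data: field.
        for i in range(5):
            yield JSONServerSentEvent({"tick": i}, event="tick")
            await asyncio.sleep(1.0)

    return EventSourceResponse(
        events(),
        ping=15,            # comment ping every 15 s keeps proxies from closing the stream
        send_timeout=30.0,  # abort the stream if the client stops reading for 30 s
    )
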

{sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon_python_lib.egg-info/PKG-INFO
@@ -1,23 +1,23 @@
  Metadata-Version: 2.4
  Name: sycommon-python-lib
- Version: 0.1.14
+ Version: 0.1.16
  Summary: Add your description here
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  Requires-Dist: aio-pika>=9.5.7
  Requires-Dist: aiohttp>=3.12.15
  Requires-Dist: decorator>=5.2.1
- Requires-Dist: fastapi>=0.116.1
+ Requires-Dist: fastapi>=0.117.1
  Requires-Dist: kafka-python>=2.2.15
  Requires-Dist: loguru>=0.7.3
  Requires-Dist: mysql-connector-python>=9.4.0
  Requires-Dist: nacos-sdk-python>=2.0.9
- Requires-Dist: pydantic>=2.11.7
+ Requires-Dist: pydantic>=2.11.9
  Requires-Dist: python-dotenv>=1.1.1
- Requires-Dist: pyyaml>=6.0.2
+ Requires-Dist: pyyaml>=6.0.3
  Requires-Dist: sqlalchemy>=2.0.43
  Requires-Dist: uuid>=1.30
- Requires-Dist: uvicorn>=0.35.0
+ Requires-Dist: uvicorn>=0.37.0

  # sycommon-python-lib


{sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon_python_lib.egg-info/SOURCES.txt
@@ -37,6 +37,9 @@ src/sycommon/models/mqsend_config.py
  src/sycommon/models/sso_user.py
  src/sycommon/rabbitmq/rabbitmq_client.py
  src/sycommon/rabbitmq/rabbitmq_service.py
+ src/sycommon/sse/__init__.py
+ src/sycommon/sse/event.py
+ src/sycommon/sse/sse.py
  src/sycommon/synacos/__init__.py
  src/sycommon/synacos/feign.py
  src/sycommon/synacos/nacos_service.py

{sycommon_python_lib-0.1.14 → sycommon_python_lib-0.1.16}/src/sycommon_python_lib.egg-info/requires.txt
@@ -1,14 +1,14 @@
  aio-pika>=9.5.7
  aiohttp>=3.12.15
  decorator>=5.2.1
- fastapi>=0.116.1
+ fastapi>=0.117.1
  kafka-python>=2.2.15
  loguru>=0.7.3
  mysql-connector-python>=9.4.0
  nacos-sdk-python>=2.0.9
- pydantic>=2.11.7
+ pydantic>=2.11.9
  python-dotenv>=1.1.1
- pyyaml>=6.0.2
+ pyyaml>=6.0.3
  sqlalchemy>=2.0.43
  uuid>=1.30
- uvicorn>=0.35.0
+ uvicorn>=0.37.0

sycommon_python_lib-0.1.14/src/sycommon/middleware/cors.py
@@ -1,14 +0,0 @@
- from fastapi.middleware.cors import CORSMiddleware
-
-
- def setup_cors_handler(app):
-     app.add_middleware(
-         CORSMiddleware,
-         allow_origins=["*"],
-         allow_credentials=True,
-         allow_methods=["*"],
-         allow_headers=["*"],
-         expose_headers=["*"],
-     )
-
-     return app