toms-fast 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. toms_fast-0.2.1.dist-info/METADATA +467 -0
  2. toms_fast-0.2.1.dist-info/RECORD +60 -0
  3. toms_fast-0.2.1.dist-info/WHEEL +4 -0
  4. toms_fast-0.2.1.dist-info/entry_points.txt +2 -0
  5. tomskit/__init__.py +0 -0
  6. tomskit/celery/README.md +693 -0
  7. tomskit/celery/__init__.py +4 -0
  8. tomskit/celery/celery.py +306 -0
  9. tomskit/celery/config.py +377 -0
  10. tomskit/cli/__init__.py +207 -0
  11. tomskit/cli/__main__.py +8 -0
  12. tomskit/cli/scaffold.py +123 -0
  13. tomskit/cli/templates/__init__.py +42 -0
  14. tomskit/cli/templates/base.py +348 -0
  15. tomskit/cli/templates/celery.py +101 -0
  16. tomskit/cli/templates/extensions.py +213 -0
  17. tomskit/cli/templates/fastapi.py +400 -0
  18. tomskit/cli/templates/migrations.py +281 -0
  19. tomskit/cli/templates_config.py +122 -0
  20. tomskit/logger/README.md +466 -0
  21. tomskit/logger/__init__.py +4 -0
  22. tomskit/logger/config.py +106 -0
  23. tomskit/logger/logger.py +290 -0
  24. tomskit/py.typed +0 -0
  25. tomskit/redis/README.md +462 -0
  26. tomskit/redis/__init__.py +6 -0
  27. tomskit/redis/config.py +85 -0
  28. tomskit/redis/redis_pool.py +87 -0
  29. tomskit/redis/redis_sync.py +66 -0
  30. tomskit/server/__init__.py +47 -0
  31. tomskit/server/config.py +117 -0
  32. tomskit/server/exceptions.py +412 -0
  33. tomskit/server/middleware.py +371 -0
  34. tomskit/server/parser.py +312 -0
  35. tomskit/server/resource.py +464 -0
  36. tomskit/server/server.py +276 -0
  37. tomskit/server/type.py +263 -0
  38. tomskit/sqlalchemy/README.md +590 -0
  39. tomskit/sqlalchemy/__init__.py +20 -0
  40. tomskit/sqlalchemy/config.py +125 -0
  41. tomskit/sqlalchemy/database.py +125 -0
  42. tomskit/sqlalchemy/pagination.py +359 -0
  43. tomskit/sqlalchemy/property.py +19 -0
  44. tomskit/sqlalchemy/sqlalchemy.py +131 -0
  45. tomskit/sqlalchemy/types.py +32 -0
  46. tomskit/task/README.md +67 -0
  47. tomskit/task/__init__.py +4 -0
  48. tomskit/task/task_manager.py +124 -0
  49. tomskit/tools/README.md +63 -0
  50. tomskit/tools/__init__.py +18 -0
  51. tomskit/tools/config.py +70 -0
  52. tomskit/tools/warnings.py +37 -0
  53. tomskit/tools/woker.py +81 -0
  54. tomskit/utils/README.md +666 -0
  55. tomskit/utils/README_SERIALIZER.md +644 -0
  56. tomskit/utils/__init__.py +35 -0
  57. tomskit/utils/fields.py +434 -0
  58. tomskit/utils/marshal_utils.py +137 -0
  59. tomskit/utils/response_utils.py +13 -0
  60. tomskit/utils/serializers.py +447 -0
@@ -0,0 +1,306 @@
+ """
+ Celery async task support module.
+ Supports async database access and async access to Redis.
+ Supports trace_id tracking: the trace_id is automatically extracted from task arguments or headers and set on the logging context.
+ """
+ import asyncio
+ import typing as t
+ import uuid
+ from contextvars import ContextVar
+
+ from celery import Celery
+ from celery.signals import task_prerun, task_postrun
+
+ from tomskit.sqlalchemy.database import db
+ from tomskit.redis.redis_pool import redis_client
+ from tomskit.logger import set_app_trace_id
+
+ # Context variable holding the current Celery application
+ celery_context: ContextVar[t.Optional["AsyncCelery"]] = ContextVar(
+     "tomskit_celery_context_runtime", default=None
+ )
+
+
+ class AsyncCelery(Celery):
+     """
+     Async Celery application class. Inherits from Celery and handles configuration management.
+
+     The class does not initialize any resources itself; resource initialization should be done by application code when the worker starts.
+
+     Example:
+     ```python
+     from tomskit.celery import AsyncCelery
+
+     celery_app = AsyncCelery(
+         'myapp',
+         broker='redis://localhost:6379/0',
+         backend='redis://localhost:6379/0'
+     )
+
+     celery_app.from_mapping(
+         SQLALCHEMY_DATABASE_URI='mysql+aiomysql://user:pass@localhost/db',
+         SQLALCHEMY_ENGINE_OPTIONS={...},
+         REDIS_HOST='localhost',  # note: this key is read back from celery_app.config
+         REDIS_PORT=6379,
+     )
+     ```
+     """
+
+     def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+         super().__init__(*args, **kwargs)
+         self.config: dict[str, t.Any] = {}
+         self.app_root_path: t.Optional[str] = None
+         celery_context.set(self)
+
+         # Register the signal handlers that manage trace_id automatically
+         self._setup_trace_id_support()
+
+     def set_app_root_path(self, app_root_path: str) -> None:
+         """
+         Set the application root path.
+
+         Args:
+             app_root_path: application root path
+         """
+         self.app_root_path = app_root_path
+
+     def from_mapping(
+         self, mapping: t.Mapping[str, t.Any] | None = None, **kwargs: t.Any
+     ) -> bool:
+         """
+         Load configuration from a mapping and/or keyword arguments.
+         Only uppercase keys are picked up.
+
+         Args:
+             mapping: configuration mapping
+             **kwargs: configuration keyword arguments
+
+         Returns:
+             True on success
+         """
+         mappings: dict[str, t.Any] = {}
+         if mapping is not None:
+             mappings.update(mapping)
+         mappings.update(kwargs)
+         for key, value in mappings.items():
+             if key.isupper():
+                 self.config[key] = value
+         return True
+
+     def _setup_trace_id_support(self) -> None:
+         """
+         Set up trace_id support.
+
+         Registers Celery signal handlers that, before each task runs, extract the trace_id
+         from the task arguments or headers and set it on the logging context, so that task logs carry the trace_id.
+
+         This method only needs to run once (when AsyncCelery is initialized); the signal
+         handlers are then invoked automatically before every task execution.
+         """
+         def set_trace_id_from_task(
+             sender=None,
+             task_id=None,
+             task=None,
+             args=None,
+             kwargs=None,
+             request=None,
+             **kwds
+         ):
+             """
+             Set the trace_id before a task runs.
+
+             The trace_id is extracted with the following priority:
+             1. the 'trace_id' entry in the task kwargs
+             2. the 'request_id' entry in the task kwargs (for compatibility)
+             3. 'trace_id' or 'X-Request-ID' in the task request.headers
+             4. if none is found (scheduled tasks), generate a new UUID or use the task ID
+
+             Args:
+                 sender: the task object (Task instance)
+                 task_id: task ID
+                 task: the task function object
+                 args: positional task arguments
+                 kwargs: keyword task arguments
+                 request: the task request object (provided by Celery)
+             """
+             trace_id = None
+
+             # 1. Extract from kwargs (may be passed explicitly by the caller)
+             if kwargs:
+                 trace_id = kwargs.get('trace_id') or kwargs.get('request_id')
+
+             # 2. Extract from request.headers (may be passed via apply_async)
+             if not trace_id and request and hasattr(request, 'headers'):
+                 headers = request.headers or {}
+                 trace_id = (
+                     headers.get('trace_id') or
+                     headers.get('X-Request-ID') or
+                     headers.get('X-Trace-ID')
+                 )
+
+             # 3. No trace_id found (e.g. a scheduled task): generate a new UUID
+             if not trace_id:
+                 # Give scheduled tasks a fresh trace_id so they remain traceable
+                 trace_id = str(uuid.uuid4())
+
+             # Store the trace_id in the logging context variable
+             set_app_trace_id(str(trace_id))
+
+         def clear_trace_id_after_task(sender=None, **kwds):
+             """
+             Clear the trace_id after the task finishes (optional, keeps the context clean).
+
+             Note: since ContextVar is thread/coroutine safe and every task runs in its own
+             context, clearing is not strictly required, but it keeps the code tidy.
+             """
+             # Reset to the default value
+             set_app_trace_id("-")
+
+         # Bind the signal handlers to this application instance.
+         # They are registered once and fire automatically around every task execution.
+         task_prerun.connect(set_trace_id_from_task, sender=self)
+         task_postrun.connect(clear_trace_id_after_task, sender=self)
+
+
+ class AsyncTaskRunner:
+     """
+     Async task runner used to execute an async function inside a Celery task.
+
+     It is responsible only for:
+     1. running the async function
+     2. automatically creating/closing the database session (if enabled)
+
+     Prerequisite: the database connection pool and the Redis client must be initialized by application code when the worker starts.
+
+     Parameters:
+         async_task: the async task function (must be a coroutine function)
+         use_db: whether to enable database session management, defaults to True
+         use_redis: whether to check the Redis client, defaults to False (check only, no lifecycle management)
+
+     Example:
+     ```python
+     from celery.signals import worker_process_init
+     from tomskit.celery import AsyncCelery, AsyncTaskRunner
+     from tomskit.sqlalchemy.database import db
+     from tomskit.redis.redis_pool import RedisClientWrapper
+
+     celery_app = AsyncCelery(...)
+
+     @worker_process_init.connect
+     def init_worker(sender=None, **kwargs):
+         # Initialize the database connection pool
+         db.initialize_session_pool(
+             celery_app.config["SQLALCHEMY_DATABASE_URI"],
+             celery_app.config.get("SQLALCHEMY_ENGINE_OPTIONS", {})
+         )
+         # Initialize Redis (if needed)
+         RedisClientWrapper.initialize(redis_config)
+
+     @celery_app.task
+     def my_task():
+         runner = AsyncTaskRunner(async_my_task, use_db=True, use_redis=True)
+         return runner.run()
+
+     async def async_my_task():
+         # Use db.session and redis_client directly
+         user = await db.session.get(User, 1)
+         await redis_client.set("key", "value")
+         return "success"
+     ```
+     """
+
+     def __init__(
+         self,
+         async_task: t.Callable[..., t.Awaitable[t.Any]],
+         use_db: bool = True,
+         use_redis: bool = False
+     ):
+         """
+         Initialize the async task runner.
+
+         Args:
+             async_task: the async task function (must be a coroutine function)
+             use_db: whether to enable database session management, defaults to True
+             use_redis: whether to check the Redis client, defaults to False
+
+         Raises:
+             RuntimeError: if the Celery app is not initialized, or if async_task is not a coroutine function
+         """
+         self.__async_task = async_task
+         self.__current_celery_app = celery_context.get()
+         self.__use_db = use_db
+         self.__use_redis = use_redis
+
+         if self.__current_celery_app is None:
+             raise RuntimeError(
+                 "Celery app is not initialized. "
+                 "Please ensure AsyncCelery is created first."
+             )
+
+         if not asyncio.iscoroutinefunction(self.__async_task):
+             raise RuntimeError(
+                 "async_task must be an asynchronous function (coroutine function)"
+             )
+
+     def run(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
+         """
+         Run the async task.
+
+         Executes the async task in a new event loop via asyncio.run and manages the database session automatically.
+
+         Args:
+             *args: positional arguments passed to the async task
+             **kwargs: keyword arguments passed to the async task
+
+         Returns:
+             the return value of the async task
+         """
+         return asyncio.run(self._run(*args, **kwargs))
+
+     async def _run(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
+         """
+         Execute the task, managing the session lifecycle automatically.
+
+         Note:
+         - the database connection pool should be initialized by application code when the worker starts
+         - the Redis client should be initialized by application code when the worker starts
+
+         Args:
+             *args: positional arguments passed to the async task
+             **kwargs: keyword arguments passed to the async task
+
+         Returns:
+             the return value of the async task
+
+         Raises:
+             RuntimeError: if the database connection pool or the Redis client is not initialized
+         """
+         # Create a database session (if required)
+         db_session = None
+         if self.__use_db:
+             # Make sure the connection pool has been initialized
+             if not hasattr(db, '_engine') or db._engine is None:
+                 raise RuntimeError(
+                     "Database connection pool is not initialized. "
+                     "Please initialize it in worker_process_init signal handler using: "
+                     "db.initialize_session_pool(db_uri, engine_options)"
+                 )
+             db_session = db.create_session()
+
+         # Make sure Redis has been initialized (if required)
+         if self.__use_redis:
+             if not hasattr(redis_client, '_client') or redis_client._client is None:
+                 raise RuntimeError(
+                     "Redis client is not initialized. "
+                     "Please initialize it in worker_process_init signal handler using: "
+                     "RedisClientWrapper.initialize(redis_config)"
+                 )
+
+         try:
+             # Run the task
+             result = await self.__async_task(*args, **kwargs)
+             return result
+         finally:
+             # Close the session (the connection pool stays open)
+             if db_session:
+                 await db.close_session(db_session)
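
In the hunk above (tomskit/celery/celery.py), the prerun handler resolves a trace_id from the task kwargs first, then from the message headers, and finally falls back to a fresh UUID for tasks that carry neither (e.g. scheduled jobs). Below is a minimal producer-side sketch of how a caller could forward an existing trace_id to the worker; the task `process_order`, its arguments, and the broker URL are illustrative and not part of the package:

```python
import uuid

from tomskit.celery import AsyncCelery

# Hypothetical wiring; the broker/backend URLs are placeholders.
celery_app = AsyncCelery("myapp", broker="redis://localhost:6379/0", backend="redis://localhost:6379/0")


@celery_app.task
def process_order(order_id: int, trace_id: str | None = None) -> None:
    # Hypothetical task; it accepts a trace_id kwarg so the handler's kwargs path applies.
    ...


# Producer side: forward the current request's trace_id to the worker.
incoming_trace_id = str(uuid.uuid4())  # in practice, taken from the incoming X-Request-ID header
process_order.apply_async(
    kwargs={"order_id": 42, "trace_id": incoming_trace_id},  # picked up via kwargs by the prerun handler
    headers={"trace_id": incoming_trace_id},                 # headers path described in the handler's docstring
)
```

Tasks enqueued without either value (for example by celery beat) simply get a new uuid4, so worker logs remain correlatable per run. The next hunk adds tomskit/celery/config.py.
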
@@ -0,0 +1,377 @@
+ from typing import Any, Optional
+ from urllib.parse import quote_plus
+
+ from pydantic import Field, NonNegativeInt, PositiveInt, computed_field
+ from pydantic_settings import BaseSettings
+
+
+ class CeleryConfig(BaseSettings):
+     """
+     Celery configuration class.
+
+     All settings are prefixed with CELERY_ for easier management and identification.
+     Supports:
+     - Redis as broker and backend
+     - a database as the result backend
+     - all standard Celery configuration options
+     """
+
+     # ========== Redis Broker Configuration ==========
+     CELERY_BROKER_REDIS_HOST: str = Field(
+         description="Redis host for Celery broker",
+         default="localhost",
+     )
+
+     CELERY_BROKER_REDIS_PORT: PositiveInt = Field(
+         description="Redis port for Celery broker",
+         default=6379,
+     )
+
+     CELERY_BROKER_REDIS_USERNAME: Optional[str] = Field(
+         description="Redis username for Celery broker (if required)",
+         default=None,
+     )
+
+     CELERY_BROKER_REDIS_PASSWORD: Optional[str] = Field(
+         description="Redis password for Celery broker (if required)",
+         default=None,
+     )
+
+     CELERY_BROKER_REDIS_DB: NonNegativeInt = Field(
+         description="Redis database number for Celery broker",
+         default=0,
+     )
+
+     # ========== Result Backend Configuration ==========
+     CELERY_RESULT_BACKEND_TYPE: str = Field(
+         description="Result backend type: 'redis' or 'database'",
+         default="redis",
+     )
+
+     # Redis Backend Configuration (if CELERY_RESULT_BACKEND_TYPE='redis')
+     CELERY_RESULT_BACKEND_REDIS_HOST: str = Field(
+         description="Redis host for Celery result backend",
+         default="localhost",
+     )
+
+     CELERY_RESULT_BACKEND_REDIS_PORT: PositiveInt = Field(
+         description="Redis port for Celery result backend",
+         default=6379,
+     )
+
+     CELERY_RESULT_BACKEND_REDIS_USERNAME: Optional[str] = Field(
+         description="Redis username for Celery result backend (if required)",
+         default=None,
+     )
+
+     CELERY_RESULT_BACKEND_REDIS_PASSWORD: Optional[str] = Field(
+         description="Redis password for Celery result backend (if required)",
+         default=None,
+     )
+
+     CELERY_RESULT_BACKEND_REDIS_DB: NonNegativeInt = Field(
+         description="Redis database number for Celery result backend",
+         default=1,  # Different from broker DB by default
+     )
+
+     # Database Backend Configuration (if CELERY_RESULT_BACKEND_TYPE='database')
+     # Uses the same database config as SQLAlchemy
+     CELERY_RESULT_BACKEND_DATABASE_URI_SCHEME: str = Field(
+         description="Database URI scheme for Celery result backend (e.g., 'mysql', 'postgresql')",
+         default="mysql",
+     )
+
+     # ========== Celery Task Configuration ==========
+     CELERY_TASK_SERIALIZER: str = Field(
+         description="Task serialization format",
+         default="json",
+     )
+
+     CELERY_RESULT_SERIALIZER: str = Field(
+         description="Result serialization format",
+         default="json",
+     )
+
+     CELERY_ACCEPT_CONTENT: list[str] = Field(
+         description="Accepted content types",
+         default=["json"],
+     )
+
+     CELERY_TIMEZONE: str = Field(
+         description="Celery timezone",
+         default="UTC",
+     )
+
+     CELERY_ENABLE_UTC: bool = Field(
+         description="Enable UTC timezone",
+         default=True,
+     )
+
+     CELERY_TASK_TRACK_STARTED: bool = Field(
+         description="Track task started state",
+         default=True,
+     )
+
+     CELERY_TASK_TIME_LIMIT: Optional[NonNegativeInt] = Field(
+         description="Hard time limit for tasks in seconds",
+         default=None,
+     )
+
+     CELERY_TASK_SOFT_TIME_LIMIT: Optional[NonNegativeInt] = Field(
+         description="Soft time limit for tasks in seconds",
+         default=None,
+     )
+
+     CELERY_TASK_IGNORE_RESULT: bool = Field(
+         description="Ignore task results by default",
+         default=False,
+     )
+
+     CELERY_RESULT_EXPIRES: Optional[NonNegativeInt] = Field(
+         description="Result expiration time in seconds",
+         default=None,
+     )
+
+     # ========== Database Configuration (for worker and result backend) ==========
+     CELERY_DB_HOST: str = Field(
+         description="Database host (for the worker and the result backend)",
+         default="localhost",
+     )
+
+     CELERY_DB_PORT: PositiveInt = Field(
+         description="Database port (for the worker and the result backend)",
+         default=5432,
+     )
+
+     CELERY_DB_USERNAME: str = Field(
+         description="Database username (for the worker and the result backend)",
+         default="",
+     )
+
+     CELERY_DB_PASSWORD: str = Field(
+         description="Database password (for the worker and the result backend)",
+         default="",
+     )
+
+     CELERY_DB_DATABASE: str = Field(
+         description="Database name (for the worker and the result backend)",
+         default="tomskitdb",
+     )
+
+     CELERY_DB_CHARSET: str = Field(
+         description="Database charset (for the worker and the result backend)",
+         default="",
+     )
+
+     CELERY_DB_EXTRAS: str = Field(
+         description="Extra database connection parameters (for the worker and the result backend). Example: keepalives_idle=60&keepalives=1",
+         default="",
+     )
+
+     CELERY_SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(
+         description="SQLAlchemy async database URI scheme (for the worker)",
+         default="mysql+aiomysql",
+     )
+
+     CELERY_SQLALCHEMY_DATABASE_SYNC_URI_SCHEME: str = Field(
+         description="SQLAlchemy sync database URI scheme (for the worker)",
+         default="mysql+pymysql",
+     )
+
+     CELERY_SQLALCHEMY_POOL_SIZE: NonNegativeInt = Field(
+         description="SQLAlchemy connection pool size (for the worker)",
+         default=300,
+     )
+
+     CELERY_SQLALCHEMY_MAX_OVERFLOW: NonNegativeInt = Field(
+         description="SQLAlchemy max overflow connections (for the worker)",
+         default=10,
+     )
+
+     CELERY_SQLALCHEMY_POOL_RECYCLE: NonNegativeInt = Field(
+         description="SQLAlchemy connection pool recycle time in seconds (for the worker)",
+         default=3600,
+     )
+
+     CELERY_SQLALCHEMY_POOL_PRE_PING: bool = Field(
+         description="Enable SQLAlchemy connection pool pre-ping (for the worker)",
+         default=False,
+     )
+
+     CELERY_SQLALCHEMY_ECHO: bool = Field(
+         description="Enable SQLAlchemy SQL echo (for the worker)",
+         default=False,
+     )
+
+     CELERY_SQLALCHEMY_POOL_ECHO: bool = Field(
+         description="Enable SQLAlchemy connection pool echo (for the worker)",
+         default=False,
+     )
+
+     # ========== Redis Configuration (for worker) ==========
+     CELERY_WORKER_REDIS_HOST: str = Field(
+         description="Redis host (for the worker)",
+         default="localhost",
+     )
+
+     CELERY_WORKER_REDIS_PORT: PositiveInt = Field(
+         description="Redis port (for the worker)",
+         default=6379,
+     )
+
+     CELERY_WORKER_REDIS_USERNAME: Optional[str] = Field(
+         description="Redis username (for the worker, optional)",
+         default=None,
+     )
+
+     CELERY_WORKER_REDIS_PASSWORD: Optional[str] = Field(
+         description="Redis password (for the worker, optional)",
+         default=None,
+     )
+
+     CELERY_WORKER_REDIS_DB: NonNegativeInt = Field(
+         description="Redis database number (for the worker)",
+         default=0,
+     )
+
+     # ========== Computed Properties ==========
+
+     @computed_field  # type: ignore
+     @property
+     def CELERY_BROKER_URL(self) -> str:
+         """Generate the Redis broker URL."""
+         auth = ""
+         if self.CELERY_BROKER_REDIS_USERNAME or self.CELERY_BROKER_REDIS_PASSWORD:
+             username = quote_plus(self.CELERY_BROKER_REDIS_USERNAME or "")
+             password = quote_plus(self.CELERY_BROKER_REDIS_PASSWORD or "")
+             auth = f"{username}:{password}@"
+         return f"redis://{auth}{self.CELERY_BROKER_REDIS_HOST}:{self.CELERY_BROKER_REDIS_PORT}/{self.CELERY_BROKER_REDIS_DB}"
+
+     @computed_field  # type: ignore
+     @property
+     def CELERY_RESULT_BACKEND(self) -> str:
+         """Generate the result backend URL."""
+         if self.CELERY_RESULT_BACKEND_TYPE == "database":
+             # Database backend
+             db_extras = (
+                 f"{self.CELERY_DB_EXTRAS}&client_encoding={self.CELERY_DB_CHARSET}"
+                 if self.CELERY_DB_CHARSET
+                 else self.CELERY_DB_EXTRAS
+             ).strip("&")
+             db_extras = f"?{db_extras}" if db_extras else ""
+
+             username = quote_plus(self.CELERY_DB_USERNAME)
+             password = quote_plus(self.CELERY_DB_PASSWORD)
+             return (
+                 f"db+{self.CELERY_RESULT_BACKEND_DATABASE_URI_SCHEME}://"
+                 f"{username}:{password}@{self.CELERY_DB_HOST}:{self.CELERY_DB_PORT}/{self.CELERY_DB_DATABASE}"
+                 f"{db_extras}"
+             )
+         else:
+             # Redis backend
+             auth = ""
+             if (
+                 self.CELERY_RESULT_BACKEND_REDIS_USERNAME
+                 or self.CELERY_RESULT_BACKEND_REDIS_PASSWORD
+             ):
+                 username = quote_plus(
+                     self.CELERY_RESULT_BACKEND_REDIS_USERNAME or ""
+                 )
+                 password = quote_plus(
+                     self.CELERY_RESULT_BACKEND_REDIS_PASSWORD or ""
+                 )
+                 auth = f"{username}:{password}@"
+             return (
+                 f"redis://{auth}{self.CELERY_RESULT_BACKEND_REDIS_HOST}:"
+                 f"{self.CELERY_RESULT_BACKEND_REDIS_PORT}/{self.CELERY_RESULT_BACKEND_REDIS_DB}"
+             )
+
+     @computed_field  # type: ignore
+     @property
+     def SQLALCHEMY_DATABASE_URI(self) -> str:
+         """Generate the SQLAlchemy async database URI."""
+         db_extras = (
+             f"{self.CELERY_DB_EXTRAS}&client_encoding={self.CELERY_DB_CHARSET}"
+             if self.CELERY_DB_CHARSET
+             else self.CELERY_DB_EXTRAS
+         ).strip("&")
+         db_extras = f"?{db_extras}" if db_extras else ""
+         username = quote_plus(self.CELERY_DB_USERNAME)
+         password = quote_plus(self.CELERY_DB_PASSWORD)
+         return (
+             f"{self.CELERY_SQLALCHEMY_DATABASE_URI_SCHEME}://"
+             f"{username}:{password}@{self.CELERY_DB_HOST}:{self.CELERY_DB_PORT}/{self.CELERY_DB_DATABASE}"
+             f"{db_extras}"
+         )
+
+     @computed_field  # type: ignore
+     @property
+     def SQLALCHEMY_DATABASE_SYNC_URI(self) -> str:
+         """Generate the SQLAlchemy sync database URI."""
+         db_extras = (
+             f"{self.CELERY_DB_EXTRAS}&client_encoding={self.CELERY_DB_CHARSET}"
+             if self.CELERY_DB_CHARSET
+             else self.CELERY_DB_EXTRAS
+         ).strip("&")
+         db_extras = f"?{db_extras}" if db_extras else ""
+         username = quote_plus(self.CELERY_DB_USERNAME)
+         password = quote_plus(self.CELERY_DB_PASSWORD)
+         return (
+             f"{self.CELERY_SQLALCHEMY_DATABASE_SYNC_URI_SCHEME}://"
+             f"{username}:{password}@{self.CELERY_DB_HOST}:{self.CELERY_DB_PORT}/{self.CELERY_DB_DATABASE}"
+             f"{db_extras}"
+         )
+
+     @computed_field  # type: ignore
+     @property
+     def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
+         """Generate the SQLAlchemy engine options."""
+         return {
+             "pool_size": self.CELERY_SQLALCHEMY_POOL_SIZE,
+             "max_overflow": self.CELERY_SQLALCHEMY_MAX_OVERFLOW,
+             "pool_recycle": self.CELERY_SQLALCHEMY_POOL_RECYCLE,
+             "pool_pre_ping": self.CELERY_SQLALCHEMY_POOL_PRE_PING,
+             "echo": self.CELERY_SQLALCHEMY_ECHO,
+             "echo_pool": self.CELERY_SQLALCHEMY_POOL_ECHO,
+         }
+
+     def get_celery_config_dict(self) -> dict[str, Any]:
+         """
+         Build the Celery configuration dict for use with from_mapping.
+
+         Returns:
+             a dict containing all Celery configuration options
+         """
+         config = {
+             "CELERY_TASK_SERIALIZER": self.CELERY_TASK_SERIALIZER,
+             "CELERY_RESULT_SERIALIZER": self.CELERY_RESULT_SERIALIZER,
+             "CELERY_ACCEPT_CONTENT": self.CELERY_ACCEPT_CONTENT,
+             "CELERY_TIMEZONE": self.CELERY_TIMEZONE,
+             "CELERY_ENABLE_UTC": self.CELERY_ENABLE_UTC,
+             "CELERY_TASK_TRACK_STARTED": self.CELERY_TASK_TRACK_STARTED,
+             "CELERY_TASK_IGNORE_RESULT": self.CELERY_TASK_IGNORE_RESULT,
+             # Database configuration (for the worker)
+             "SQLALCHEMY_DATABASE_URI": self.SQLALCHEMY_DATABASE_URI,
+             "SQLALCHEMY_ENGINE_OPTIONS": self.SQLALCHEMY_ENGINE_OPTIONS,
+             # Redis configuration (for the worker)
+             "REDIS_HOST": self.CELERY_WORKER_REDIS_HOST,
+             "REDIS_PORT": self.CELERY_WORKER_REDIS_PORT,
+             "REDIS_DB": self.CELERY_WORKER_REDIS_DB,
+         }
+
+         if self.CELERY_TASK_TIME_LIMIT is not None:
+             config["CELERY_TASK_TIME_LIMIT"] = self.CELERY_TASK_TIME_LIMIT
+
+         if self.CELERY_TASK_SOFT_TIME_LIMIT is not None:
+             config["CELERY_TASK_SOFT_TIME_LIMIT"] = self.CELERY_TASK_SOFT_TIME_LIMIT
+
+         if self.CELERY_RESULT_EXPIRES is not None:
+             config["CELERY_RESULT_EXPIRES"] = self.CELERY_RESULT_EXPIRES
+
+         if self.CELERY_WORKER_REDIS_USERNAME:
+             config["REDIS_USERNAME"] = self.CELERY_WORKER_REDIS_USERNAME
+
+         if self.CELERY_WORKER_REDIS_PASSWORD:
+             config["REDIS_PASSWORD"] = self.CELERY_WORKER_REDIS_PASSWORD
+
+         return config
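
The CeleryConfig computed fields tie the two files together: CELERY_BROKER_URL and CELERY_RESULT_BACKEND can be handed to the AsyncCelery constructor, while get_celery_config_dict() yields the uppercase mapping consumed by AsyncCelery.from_mapping (SQLALCHEMY_DATABASE_URI, SQLALCHEMY_ENGINE_OPTIONS, and the worker REDIS_* keys). A minimal wiring sketch under that assumption; the import path for CeleryConfig is assumed, and field values are read from the environment by pydantic-settings (e.g. CELERY_BROKER_REDIS_HOST, CELERY_DB_PASSWORD):

```python
from tomskit.celery import AsyncCelery  # exported per the celery.py docstring example
from tomskit.celery.config import CeleryConfig  # assumed import path for the config module above

# pydantic-settings populates the CELERY_* fields from environment variables.
settings = CeleryConfig()

celery_app = AsyncCelery(
    "myapp",
    broker=settings.CELERY_BROKER_URL,       # redis://[user:pass@]host:port/db
    backend=settings.CELERY_RESULT_BACKEND,  # redis://... or db+<scheme>://... depending on CELERY_RESULT_BACKEND_TYPE
)

# Push the uppercase config (database URI, engine options, worker REDIS_* keys)
# into celery_app.config for later use, e.g. by a worker_process_init handler.
celery_app.from_mapping(settings.get_celery_config_dict())
```
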