codex-lb 0.2.0__tar.gz → 0.3.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_lb-0.3.0/.github/release-please-manifest.json +3 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/.gitignore +2 -1
- {codex_lb-0.2.0 → codex_lb-0.3.0}/CHANGELOG.md +29 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/PKG-INFO +2 -2
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/auth/__init__.py +10 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/balancer/logic.py +33 -6
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/config/settings.py +2 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/usage/__init__.py +2 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/usage/logs.py +12 -2
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/usage/quota.py +10 -4
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/usage/types.py +3 -2
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/db/migrations/__init__.py +14 -3
- codex_lb-0.3.0/app/db/migrations/versions/add_accounts_chatgpt_account_id.py +29 -0
- codex_lb-0.3.0/app/db/migrations/versions/add_accounts_reset_at.py +29 -0
- codex_lb-0.3.0/app/db/migrations/versions/add_dashboard_settings.py +31 -0
- codex_lb-0.3.0/app/db/migrations/versions/add_request_logs_reasoning_effort.py +21 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/db/models.py +33 -0
- codex_lb-0.3.0/app/db/session.py +136 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/dependencies.py +27 -1
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/main.py +11 -2
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/accounts/auth_manager.py +44 -3
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/accounts/repository.py +14 -6
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/accounts/service.py +4 -2
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/oauth/service.py +4 -3
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/proxy/load_balancer.py +74 -5
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/proxy/service.py +155 -31
- codex_lb-0.3.0/app/modules/proxy/sticky_repository.py +56 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/request_logs/repository.py +6 -3
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/request_logs/schemas.py +2 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/request_logs/service.py +8 -1
- codex_lb-0.3.0/app/modules/settings/__init__.py +1 -0
- codex_lb-0.3.0/app/modules/settings/api.py +37 -0
- codex_lb-0.3.0/app/modules/settings/repository.py +40 -0
- codex_lb-0.3.0/app/modules/settings/schemas.py +13 -0
- codex_lb-0.3.0/app/modules/settings/service.py +33 -0
- codex_lb-0.3.0/app/modules/shared/schemas.py +22 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/usage/schemas.py +1 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/usage/service.py +17 -1
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/usage/updater.py +36 -7
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/static/7.css +73 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/static/index.css +33 -4
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/static/index.html +51 -4
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/static/index.js +231 -25
- {codex_lb-0.2.0 → codex_lb-0.3.0}/docker-compose.yml +2 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/pyproject.toml +4 -4
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/conftest.py +3 -2
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_accounts_api.py +18 -10
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_accounts_api_extended.py +7 -4
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_db_models.py +1 -1
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_migrations.py +2 -2
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_oauth_flow.py +8 -3
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_proxy_api_extended.py +19 -15
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_proxy_compact.py +14 -6
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_proxy_responses.py +9 -3
- codex_lb-0.3.0/tests/integration/test_proxy_sticky_sessions.py +298 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_repositories.py +24 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_request_logs_api.py +1 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_request_logs_filters.py +3 -0
- codex_lb-0.3.0/tests/integration/test_settings_api.py +32 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_usage_api.py +27 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/unit/test_auth.py +3 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/unit/test_auth_manager.py +2 -0
- codex_lb-0.3.0/tests/unit/test_load_balancer.py +305 -0
- codex_lb-0.3.0/tests/unit/test_usage_updater.py +124 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/uv.lock +138 -122
- codex_lb-0.2.0/.github/release-please-manifest.json +0 -3
- codex_lb-0.2.0/app/db/session.py +0 -76
- codex_lb-0.2.0/app/modules/shared/schemas.py +0 -8
- codex_lb-0.2.0/tests/unit/test_load_balancer.py +0 -129
- {codex_lb-0.2.0 → codex_lb-0.3.0}/.dockerignore +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/.env.example +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/.github/release-please-config.json +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/.github/workflows/ci.yml +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/.github/workflows/release-please.yml +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/.github/workflows/release.yml +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/.pre-commit-config.yaml +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/AGENTS.md +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/Dockerfile +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/LICENSE +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/README.md +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/cli.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/auth/models.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/auth/refresh.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/balancer/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/balancer/types.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/clients/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/clients/http.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/clients/oauth.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/clients/proxy.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/clients/usage.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/config/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/crypto.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/errors.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/openai/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/openai/models.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/openai/parsing.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/openai/requests.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/plan_types.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/types.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/usage/models.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/usage/pricing.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/utils/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/utils/request_id.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/utils/retry.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/utils/sse.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/core/utils/time.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/db/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/db/migrations/versions/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/db/migrations/versions/normalize_account_plan_types.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/accounts/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/accounts/api.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/accounts/schemas.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/health/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/health/api.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/health/schemas.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/oauth/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/oauth/api.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/oauth/schemas.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/oauth/templates/oauth_success.html +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/proxy/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/proxy/api.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/proxy/helpers.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/proxy/schemas.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/proxy/types.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/request_logs/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/request_logs/api.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/shared/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/usage/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/usage/api.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/app/modules/usage/repository.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/docs/screenshots/accounts.jpeg +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/docs/screenshots/dashboard.jpeg +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/__init__.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_codex_usage_api.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_health_and_errors.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_load_balancer_integration.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/integration/test_usage_summary.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/unit/test_auth_refresh.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/unit/test_oauth_client.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/unit/test_pricing.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/unit/test_proxy_utils.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/unit/test_retry.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/unit/test_sse.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/unit/test_usage.py +0 -0
- {codex_lb-0.2.0 → codex_lb-0.3.0}/tests/unit/test_usage_client.py +0 -0
|
@@ -1,5 +1,34 @@
|
|
|
1
1
|
# Changelog
|
|
2
2
|
|
|
3
|
+
## [0.3.0](https://github.com/Soju06/codex-lb/compare/v0.2.0...v0.3.0) (2026-01-21)
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
### Features
|
|
7
|
+
|
|
8
|
+
* add cached input tokens handling and update related metrics in … ([5bf6609](https://github.com/Soju06/codex-lb/commit/5bf66095b8000ffc8fbdf8d989f60171604f69d3))
|
|
9
|
+
* add cached input tokens handling and update related metrics in logs and usage schemas ([c965036](https://github.com/Soju06/codex-lb/commit/c9650367c1a2d14e63e3440788b7cd44b08ebd9a))
|
|
10
|
+
* add formatting for cached input tokens metadata in metrics display ([53feaa6](https://github.com/Soju06/codex-lb/commit/53feaa62f7c5c282508f37c3fd42d9af655c2fa9))
|
|
11
|
+
* add secondary usage tracking and selection logic for accounts in load balancer ([d66cf69](https://github.com/Soju06/codex-lb/commit/d66cf69b2834b42fefbbfa646d82477f9832fdda))
|
|
12
|
+
* add ty type checking and refactors ([41fa811](https://github.com/Soju06/codex-lb/commit/41fa8112ba9b900ffa5dbee3a39d94267e2caa75))
|
|
13
|
+
* **app:** add migrations and reasoning effort support ([9eae590](https://github.com/Soju06/codex-lb/commit/9eae5903a08363291e397f983a531ddf325658d7))
|
|
14
|
+
* implement dashboard settings for sticky threads and reset preferences ([cd04812](https://github.com/Soju06/codex-lb/commit/cd0481247f0ceffdd92173ea84773960e52a7253))
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
### Bug Fixes
|
|
18
|
+
|
|
19
|
+
* **app:** tune sqlite pragmas and usage UI ([a44a4fd](https://github.com/Soju06/codex-lb/commit/a44a4fd6fe5771282a12ee62a34c9be819254322))
|
|
20
|
+
* **app:** update effort display format in history ([0796740](https://github.com/Soju06/codex-lb/commit/0796740ab570cf476b2285a615559a9a6318082f))
|
|
21
|
+
* **app:** update effort display format to include parentheses ([6fbae96](https://github.com/Soju06/codex-lb/commit/6fbae960f393ff92cae0feb614ca0e811a855851))
|
|
22
|
+
* **dashboard:** fallback primary remaining to summary ([02b3d39](https://github.com/Soju06/codex-lb/commit/02b3d39c2b734271af7c420fc52b7e87350177e1))
|
|
23
|
+
* **db:** avoid leaked async connection in migration ([9aa1d03](https://github.com/Soju06/codex-lb/commit/9aa1d0395481a96a21db2d0add18ee1753f183b2))
|
|
24
|
+
* **db:** use returning for dml checks ([4ec7c7a](https://github.com/Soju06/codex-lb/commit/4ec7c7a6615e6e5852b0865e09184544f09ebedc))
|
|
25
|
+
* **ui:** style and label settings checkboxes ([722cad8](https://github.com/Soju06/codex-lb/commit/722cad851706e2784815dad4069902cc95b3f662))
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
### Documentation
|
|
29
|
+
|
|
30
|
+
* expand 0.2.0 changelog ([32148dc](https://github.com/Soju06/codex-lb/commit/32148dc2d195cec0dd85f61fc0a13d8cbef24e24))
|
|
31
|
+
|
|
3
32
|
## [0.2.0](https://github.com/Soju06/codex-lb/compare/v0.1.5...v0.2.0) (2026-01-19)
|
|
4
33
|
|
|
5
34
|
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: codex-lb
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.3.0
|
|
4
4
|
Summary: Codex load balancer and proxy for ChatGPT accounts with usage dashboard
|
|
5
5
|
Author-email: Soju06 <qlskssk@gmail.com>
|
|
6
6
|
Maintainer-email: Soju06 <qlskssk@gmail.com>
|
|
@@ -39,7 +39,7 @@ Classifier: Topic :: Software Development :: Libraries
|
|
|
39
39
|
Classifier: Topic :: System :: Networking
|
|
40
40
|
Requires-Python: >=3.13
|
|
41
41
|
Requires-Dist: aiohttp-retry>=2.9.1
|
|
42
|
-
Requires-Dist: aiohttp>=3.13.
|
|
42
|
+
Requires-Dist: aiohttp>=3.13.3
|
|
43
43
|
Requires-Dist: aiosqlite>=0.22.1
|
|
44
44
|
Requires-Dist: cryptography>=46.0.3
|
|
45
45
|
Requires-Dist: fastapi[standard]>=0.128.0
|
|
@@ -90,7 +90,17 @@ def claims_from_auth(auth: AuthFile) -> AccountClaims:
|
|
|
90
90
|
)
|
|
91
91
|
|
|
92
92
|
|
|
93
|
+
def generate_unique_account_id(account_id: str | None, email: str | None) -> str:
|
|
94
|
+
if account_id and email and email != DEFAULT_EMAIL:
|
|
95
|
+
email_hash = hashlib.sha256(email.encode()).hexdigest()[:8]
|
|
96
|
+
return f"{account_id}_{email_hash}"
|
|
97
|
+
if account_id:
|
|
98
|
+
return account_id
|
|
99
|
+
return fallback_account_id(email)
|
|
100
|
+
|
|
101
|
+
|
|
93
102
|
def fallback_account_id(email: str | None) -> str:
|
|
103
|
+
"""Generate a fallback account ID when no OpenAI account ID is available."""
|
|
94
104
|
if email and email != DEFAULT_EMAIL:
|
|
95
105
|
digest = hashlib.sha256(email.encode()).hexdigest()[:12]
|
|
96
106
|
return f"email_{digest}"
|
|
@@ -16,6 +16,9 @@ PERMANENT_FAILURE_CODES = {
|
|
|
16
16
|
"account_deleted": "Account has been deleted",
|
|
17
17
|
}
|
|
18
18
|
|
|
19
|
+
SECONDS_PER_DAY = 60 * 60 * 24
|
|
20
|
+
UNKNOWN_RESET_BUCKET_DAYS = 10_000
|
|
21
|
+
|
|
19
22
|
|
|
20
23
|
@dataclass
|
|
21
24
|
class AccountState:
|
|
@@ -24,6 +27,8 @@ class AccountState:
|
|
|
24
27
|
used_percent: float | None = None
|
|
25
28
|
reset_at: float | None = None
|
|
26
29
|
cooldown_until: float | None = None
|
|
30
|
+
secondary_used_percent: float | None = None
|
|
31
|
+
secondary_reset_at: int | None = None
|
|
27
32
|
last_error_at: float | None = None
|
|
28
33
|
last_selected_at: float | None = None
|
|
29
34
|
error_count: int = 0
|
|
@@ -36,7 +41,12 @@ class SelectionResult:
|
|
|
36
41
|
error_message: str | None
|
|
37
42
|
|
|
38
43
|
|
|
39
|
-
def select_account(
|
|
44
|
+
def select_account(
|
|
45
|
+
states: Iterable[AccountState],
|
|
46
|
+
now: float | None = None,
|
|
47
|
+
*,
|
|
48
|
+
prefer_earlier_reset: bool = False,
|
|
49
|
+
) -> SelectionResult:
|
|
40
50
|
current = now or time.time()
|
|
41
51
|
available: list[AccountState] = []
|
|
42
52
|
all_states = list(states)
|
|
@@ -95,18 +105,35 @@ def select_account(states: Iterable[AccountState], now: float | None = None) ->
|
|
|
95
105
|
return SelectionResult(None, f"Rate limit exceeded. Try again in {wait_seconds:.0f}s")
|
|
96
106
|
return SelectionResult(None, "No available accounts")
|
|
97
107
|
|
|
98
|
-
def
|
|
99
|
-
|
|
108
|
+
def _usage_sort_key(state: AccountState) -> tuple[float, float, float, str]:
|
|
109
|
+
primary_used = state.used_percent if state.used_percent is not None else 0.0
|
|
110
|
+
secondary_used = state.secondary_used_percent if state.secondary_used_percent is not None else primary_used
|
|
100
111
|
last_selected = state.last_selected_at or 0.0
|
|
101
|
-
return
|
|
102
|
-
|
|
103
|
-
|
|
112
|
+
return secondary_used, primary_used, last_selected, state.account_id
|
|
113
|
+
|
|
114
|
+
def _reset_first_sort_key(state: AccountState) -> tuple[int, float, float, float, str]:
|
|
115
|
+
reset_bucket_days = UNKNOWN_RESET_BUCKET_DAYS
|
|
116
|
+
if state.secondary_reset_at is not None:
|
|
117
|
+
reset_bucket_days = max(
|
|
118
|
+
0,
|
|
119
|
+
int((state.secondary_reset_at - current) // SECONDS_PER_DAY),
|
|
120
|
+
)
|
|
121
|
+
secondary_used, primary_used, last_selected, account_id = _usage_sort_key(state)
|
|
122
|
+
return reset_bucket_days, secondary_used, primary_used, last_selected, account_id
|
|
123
|
+
|
|
124
|
+
selected = min(available, key=_reset_first_sort_key if prefer_earlier_reset else _usage_sort_key)
|
|
104
125
|
return SelectionResult(selected, None)
|
|
105
126
|
|
|
106
127
|
|
|
107
128
|
def handle_rate_limit(state: AccountState, error: UpstreamError) -> None:
|
|
129
|
+
state.status = AccountStatus.RATE_LIMITED
|
|
108
130
|
state.error_count += 1
|
|
109
131
|
state.last_error_at = time.time()
|
|
132
|
+
|
|
133
|
+
reset_at = _extract_reset_at(error)
|
|
134
|
+
if reset_at is not None:
|
|
135
|
+
state.reset_at = reset_at
|
|
136
|
+
|
|
110
137
|
message = error.get("message")
|
|
111
138
|
delay = parse_retry_after(message) if message else None
|
|
112
139
|
if delay is None:
|
|
@@ -40,6 +40,8 @@ class Settings(BaseSettings):
|
|
|
40
40
|
usage_refresh_interval_seconds: int = 60
|
|
41
41
|
encryption_key_file: Path = DEFAULT_ENCRYPTION_KEY_FILE
|
|
42
42
|
database_migrations_fail_fast: bool = True
|
|
43
|
+
log_proxy_request_shape: bool = False
|
|
44
|
+
log_proxy_request_shape_raw_cache_key: bool = False
|
|
43
45
|
|
|
44
46
|
@field_validator("database_url")
|
|
45
47
|
@classmethod
|
|
@@ -17,12 +17,14 @@ from app.db.models import Account
|
|
|
17
17
|
PLAN_CAPACITY_CREDITS_PRIMARY = {
|
|
18
18
|
"plus": 225.0,
|
|
19
19
|
"business": 225.0,
|
|
20
|
+
"team": 225.0,
|
|
20
21
|
"pro": 1500.0,
|
|
21
22
|
}
|
|
22
23
|
|
|
23
24
|
PLAN_CAPACITY_CREDITS_SECONDARY = {
|
|
24
25
|
"plus": 7560.0,
|
|
25
26
|
"business": 7560.0,
|
|
27
|
+
"team": 7560.0,
|
|
26
28
|
"pro": 50400.0,
|
|
27
29
|
}
|
|
28
30
|
|
|
@@ -13,6 +13,17 @@ class RequestLogLike(Protocol):
|
|
|
13
13
|
reasoning_tokens: int | None
|
|
14
14
|
|
|
15
15
|
|
|
16
|
+
def cached_input_tokens_from_log(log: RequestLogLike) -> int | None:
|
|
17
|
+
cached_tokens = log.cached_input_tokens
|
|
18
|
+
if cached_tokens is None:
|
|
19
|
+
return None
|
|
20
|
+
cached_tokens = max(0, int(cached_tokens))
|
|
21
|
+
input_tokens = log.input_tokens
|
|
22
|
+
if input_tokens is not None:
|
|
23
|
+
cached_tokens = min(cached_tokens, int(input_tokens))
|
|
24
|
+
return cached_tokens
|
|
25
|
+
|
|
26
|
+
|
|
16
27
|
def usage_tokens_from_log(log: RequestLogLike) -> UsageTokens | None:
|
|
17
28
|
input_tokens = log.input_tokens
|
|
18
29
|
if input_tokens is None:
|
|
@@ -20,8 +31,7 @@ def usage_tokens_from_log(log: RequestLogLike) -> UsageTokens | None:
|
|
|
20
31
|
output_tokens = log.output_tokens if log.output_tokens is not None else log.reasoning_tokens
|
|
21
32
|
if output_tokens is None:
|
|
22
33
|
return None
|
|
23
|
-
cached_tokens = log
|
|
24
|
-
cached_tokens = max(0, min(cached_tokens, input_tokens))
|
|
34
|
+
cached_tokens = cached_input_tokens_from_log(log) or 0
|
|
25
35
|
return UsageTokens(
|
|
26
36
|
input_tokens=float(input_tokens),
|
|
27
37
|
output_tokens=float(output_tokens),
|
|
@@ -30,8 +30,11 @@ def apply_usage_quota(
|
|
|
30
30
|
reset_at = secondary_reset
|
|
31
31
|
return status, used_percent, reset_at
|
|
32
32
|
if status == AccountStatus.QUOTA_EXCEEDED:
|
|
33
|
-
|
|
34
|
-
|
|
33
|
+
if runtime_reset and runtime_reset > time.time():
|
|
34
|
+
reset_at = runtime_reset
|
|
35
|
+
else:
|
|
36
|
+
status = AccountStatus.ACTIVE
|
|
37
|
+
reset_at = None
|
|
35
38
|
elif status == AccountStatus.QUOTA_EXCEEDED and secondary_reset is not None:
|
|
36
39
|
reset_at = secondary_reset
|
|
37
40
|
|
|
@@ -45,8 +48,11 @@ def apply_usage_quota(
|
|
|
45
48
|
reset_at = _fallback_primary_reset(primary_window_minutes) or reset_at
|
|
46
49
|
return status, used_percent, reset_at
|
|
47
50
|
if status == AccountStatus.RATE_LIMITED:
|
|
48
|
-
|
|
49
|
-
|
|
51
|
+
if runtime_reset and runtime_reset > time.time():
|
|
52
|
+
reset_at = runtime_reset
|
|
53
|
+
else:
|
|
54
|
+
status = AccountStatus.ACTIVE
|
|
55
|
+
reset_at = None
|
|
50
56
|
|
|
51
57
|
return status, used_percent, reset_at
|
|
52
58
|
|
|
@@ -67,8 +67,9 @@ class UsageCostSummary:
|
|
|
67
67
|
class UsageMetricsSummary:
|
|
68
68
|
requests_7d: int | None
|
|
69
69
|
tokens_secondary_window: int | None
|
|
70
|
-
|
|
71
|
-
|
|
70
|
+
cached_tokens_secondary_window: int | None = None
|
|
71
|
+
error_rate_7d: float | None = None
|
|
72
|
+
top_error: str | None = None
|
|
72
73
|
|
|
73
74
|
|
|
74
75
|
@dataclass(frozen=True)
|
|
@@ -8,7 +8,13 @@ from typing import Awaitable, Callable, Final
|
|
|
8
8
|
from sqlalchemy import text
|
|
9
9
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
10
10
|
|
|
11
|
-
from app.db.migrations.versions import
|
|
11
|
+
from app.db.migrations.versions import (
|
|
12
|
+
add_accounts_chatgpt_account_id,
|
|
13
|
+
add_accounts_reset_at,
|
|
14
|
+
add_dashboard_settings,
|
|
15
|
+
add_request_logs_reasoning_effort,
|
|
16
|
+
normalize_account_plan_types,
|
|
17
|
+
)
|
|
12
18
|
|
|
13
19
|
_CREATE_MIGRATIONS_TABLE = """
|
|
14
20
|
CREATE TABLE IF NOT EXISTS schema_migrations (
|
|
@@ -21,6 +27,7 @@ _INSERT_MIGRATION = """
|
|
|
21
27
|
INSERT INTO schema_migrations (name, applied_at)
|
|
22
28
|
VALUES (:name, :applied_at)
|
|
23
29
|
ON CONFLICT(name) DO NOTHING
|
|
30
|
+
RETURNING name
|
|
24
31
|
"""
|
|
25
32
|
|
|
26
33
|
|
|
@@ -32,6 +39,10 @@ class Migration:
|
|
|
32
39
|
|
|
33
40
|
MIGRATIONS: Final[tuple[Migration, ...]] = (
|
|
34
41
|
Migration("001_normalize_account_plan_types", normalize_account_plan_types.run),
|
|
42
|
+
Migration("002_add_request_logs_reasoning_effort", add_request_logs_reasoning_effort.run),
|
|
43
|
+
Migration("003_add_accounts_reset_at", add_accounts_reset_at.run),
|
|
44
|
+
Migration("004_add_accounts_chatgpt_account_id", add_accounts_chatgpt_account_id.run),
|
|
45
|
+
Migration("005_add_dashboard_settings", add_dashboard_settings.run),
|
|
35
46
|
)
|
|
36
47
|
|
|
37
48
|
|
|
@@ -54,8 +65,8 @@ async def _apply_migration(session: AsyncSession, migration: Migration) -> bool:
|
|
|
54
65
|
"applied_at": _utcnow_iso(),
|
|
55
66
|
},
|
|
56
67
|
)
|
|
57
|
-
|
|
58
|
-
if
|
|
68
|
+
inserted = result.scalar_one_or_none()
|
|
69
|
+
if inserted is None:
|
|
59
70
|
return False
|
|
60
71
|
await migration.run(session)
|
|
61
72
|
return True
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from sqlalchemy import text
|
|
4
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
async def run(session: AsyncSession) -> None:
|
|
8
|
+
bind = session.get_bind()
|
|
9
|
+
dialect = getattr(getattr(bind, "dialect", None), "name", None)
|
|
10
|
+
if dialect == "sqlite":
|
|
11
|
+
await _sqlite_add_column_if_missing(session, "accounts", "chatgpt_account_id", "VARCHAR")
|
|
12
|
+
elif dialect == "postgresql":
|
|
13
|
+
await session.execute(
|
|
14
|
+
text("ALTER TABLE accounts ADD COLUMN IF NOT EXISTS chatgpt_account_id VARCHAR"),
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
async def _sqlite_add_column_if_missing(
|
|
19
|
+
session: AsyncSession,
|
|
20
|
+
table: str,
|
|
21
|
+
column: str,
|
|
22
|
+
column_type: str,
|
|
23
|
+
) -> None:
|
|
24
|
+
result = await session.execute(text(f"PRAGMA table_info({table})"))
|
|
25
|
+
rows = result.fetchall()
|
|
26
|
+
existing = {row[1] for row in rows if len(row) > 1}
|
|
27
|
+
if column in existing:
|
|
28
|
+
return
|
|
29
|
+
await session.execute(text(f"ALTER TABLE {table} ADD COLUMN {column} {column_type}"))
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from sqlalchemy import text
|
|
4
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
async def run(session: AsyncSession) -> None:
|
|
8
|
+
bind = session.get_bind()
|
|
9
|
+
dialect = getattr(getattr(bind, "dialect", None), "name", None)
|
|
10
|
+
if dialect == "sqlite":
|
|
11
|
+
await _sqlite_add_column_if_missing(session, "accounts", "reset_at", "INTEGER")
|
|
12
|
+
elif dialect == "postgresql":
|
|
13
|
+
await session.execute(
|
|
14
|
+
text("ALTER TABLE accounts ADD COLUMN IF NOT EXISTS reset_at INTEGER"),
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
async def _sqlite_add_column_if_missing(
|
|
19
|
+
session: AsyncSession,
|
|
20
|
+
table: str,
|
|
21
|
+
column: str,
|
|
22
|
+
column_type: str,
|
|
23
|
+
) -> None:
|
|
24
|
+
result = await session.execute(text(f"PRAGMA table_info({table})"))
|
|
25
|
+
rows = result.fetchall()
|
|
26
|
+
existing = {row[1] for row in rows if len(row) > 1}
|
|
27
|
+
if column in existing:
|
|
28
|
+
return
|
|
29
|
+
await session.execute(text(f"ALTER TABLE {table} ADD COLUMN {column} {column_type}"))
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from sqlalchemy import inspect
|
|
4
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
5
|
+
from sqlalchemy.orm import Session
|
|
6
|
+
|
|
7
|
+
from app.db.models import DashboardSettings
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def _settings_table_exists(session: Session) -> bool:
|
|
11
|
+
inspector = inspect(session.connection())
|
|
12
|
+
return inspector.has_table("dashboard_settings")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
async def run(session: AsyncSession) -> None:
|
|
16
|
+
exists = await session.run_sync(_settings_table_exists)
|
|
17
|
+
if not exists:
|
|
18
|
+
return
|
|
19
|
+
|
|
20
|
+
row = await session.get(DashboardSettings, 1)
|
|
21
|
+
if row is not None:
|
|
22
|
+
return
|
|
23
|
+
|
|
24
|
+
session.add(
|
|
25
|
+
DashboardSettings(
|
|
26
|
+
id=1,
|
|
27
|
+
sticky_threads_enabled=False,
|
|
28
|
+
prefer_earlier_reset_accounts=False,
|
|
29
|
+
)
|
|
30
|
+
)
|
|
31
|
+
await session.flush()
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from sqlalchemy import inspect, text
|
|
4
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
5
|
+
from sqlalchemy.orm import Session
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def _request_logs_column_state(session: Session) -> tuple[bool, bool]:
|
|
9
|
+
conn = session.connection()
|
|
10
|
+
inspector = inspect(conn)
|
|
11
|
+
if not inspector.has_table("request_logs"):
|
|
12
|
+
return False, False
|
|
13
|
+
columns = {column["name"] for column in inspector.get_columns("request_logs")}
|
|
14
|
+
return True, "reasoning_effort" in columns
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
async def run(session: AsyncSession) -> None:
|
|
18
|
+
has_table, has_column = await session.run_sync(_request_logs_column_state)
|
|
19
|
+
if not has_table or has_column:
|
|
20
|
+
return
|
|
21
|
+
await session.execute(text("ALTER TABLE request_logs ADD COLUMN reasoning_effort VARCHAR"))
|
|
@@ -24,6 +24,7 @@ class Account(Base):
|
|
|
24
24
|
__tablename__ = "accounts"
|
|
25
25
|
|
|
26
26
|
id: Mapped[str] = mapped_column(String, primary_key=True)
|
|
27
|
+
chatgpt_account_id: Mapped[str | None] = mapped_column(String, nullable=True)
|
|
27
28
|
email: Mapped[str] = mapped_column(String, unique=True, nullable=False)
|
|
28
29
|
plan_type: Mapped[str] = mapped_column(String, nullable=False)
|
|
29
30
|
|
|
@@ -40,6 +41,7 @@ class Account(Base):
|
|
|
40
41
|
nullable=False,
|
|
41
42
|
)
|
|
42
43
|
deactivation_reason: Mapped[str | None] = mapped_column(Text, nullable=True)
|
|
44
|
+
reset_at: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
43
45
|
|
|
44
46
|
|
|
45
47
|
class UsageHistory(Base):
|
|
@@ -71,12 +73,43 @@ class RequestLog(Base):
|
|
|
71
73
|
output_tokens: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
72
74
|
cached_input_tokens: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
73
75
|
reasoning_tokens: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
76
|
+
reasoning_effort: Mapped[str | None] = mapped_column(String, nullable=True)
|
|
74
77
|
latency_ms: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
|
75
78
|
status: Mapped[str] = mapped_column(String, nullable=False)
|
|
76
79
|
error_code: Mapped[str | None] = mapped_column(String, nullable=True)
|
|
77
80
|
error_message: Mapped[str | None] = mapped_column(Text, nullable=True)
|
|
78
81
|
|
|
79
82
|
|
|
83
|
+
class StickySession(Base):
|
|
84
|
+
__tablename__ = "sticky_sessions"
|
|
85
|
+
|
|
86
|
+
key: Mapped[str] = mapped_column(String, primary_key=True)
|
|
87
|
+
account_id: Mapped[str] = mapped_column(String, ForeignKey("accounts.id"), nullable=False)
|
|
88
|
+
created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.now(), nullable=False)
|
|
89
|
+
updated_at: Mapped[datetime] = mapped_column(
|
|
90
|
+
DateTime,
|
|
91
|
+
server_default=func.now(),
|
|
92
|
+
onupdate=func.now(),
|
|
93
|
+
nullable=False,
|
|
94
|
+
)
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class DashboardSettings(Base):
|
|
98
|
+
__tablename__ = "dashboard_settings"
|
|
99
|
+
|
|
100
|
+
id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=False)
|
|
101
|
+
sticky_threads_enabled: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
|
|
102
|
+
prefer_earlier_reset_accounts: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False)
|
|
103
|
+
created_at: Mapped[datetime] = mapped_column(DateTime, server_default=func.now(), nullable=False)
|
|
104
|
+
updated_at: Mapped[datetime] = mapped_column(
|
|
105
|
+
DateTime,
|
|
106
|
+
server_default=func.now(),
|
|
107
|
+
onupdate=func.now(),
|
|
108
|
+
nullable=False,
|
|
109
|
+
)
|
|
110
|
+
|
|
111
|
+
|
|
80
112
|
Index("idx_usage_recorded_at", UsageHistory.recorded_at)
|
|
81
113
|
Index("idx_usage_account_time", UsageHistory.account_id, UsageHistory.recorded_at)
|
|
82
114
|
Index("idx_logs_account_time", RequestLog.account_id, RequestLog.requested_at)
|
|
115
|
+
Index("idx_sticky_account", StickySession.account_id)
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import sqlite3
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import AsyncIterator, Awaitable, TypeVar
|
|
7
|
+
|
|
8
|
+
import anyio
|
|
9
|
+
from sqlalchemy import event
|
|
10
|
+
from sqlalchemy.engine import Engine
|
|
11
|
+
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
|
12
|
+
|
|
13
|
+
from app.core.config.settings import get_settings
|
|
14
|
+
from app.db.migrations import run_migrations
|
|
15
|
+
|
|
16
|
+
DATABASE_URL = get_settings().database_url
|
|
17
|
+
|
|
18
|
+
logger = logging.getLogger(__name__)
|
|
19
|
+
|
|
20
|
+
_SQLITE_BUSY_TIMEOUT_MS = 5_000
|
|
21
|
+
_SQLITE_BUSY_TIMEOUT_SECONDS = _SQLITE_BUSY_TIMEOUT_MS / 1000
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _is_sqlite_url(url: str) -> bool:
|
|
25
|
+
return url.startswith("sqlite+aiosqlite:///") or url.startswith("sqlite:///")
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _is_sqlite_memory_url(url: str) -> bool:
|
|
29
|
+
return _is_sqlite_url(url) and ":memory:" in url
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def _configure_sqlite_engine(engine: Engine, *, enable_wal: bool) -> None:
    """Register a connect-time hook applying SQLite PRAGMAs to every new connection."""

    @event.listens_for(engine, "connect")
    def _apply_pragmas(dbapi_connection: sqlite3.Connection, _: object) -> None:
        statements: list[str] = []
        if enable_wal:
            # WAL journaling with NORMAL synchronous; skipped for in-memory DBs.
            statements.append("PRAGMA journal_mode=WAL")
            statements.append("PRAGMA synchronous=NORMAL")
        statements.append("PRAGMA foreign_keys=ON")
        statements.append(f"PRAGMA busy_timeout={_SQLITE_BUSY_TIMEOUT_MS}")

        cursor = dbapi_connection.cursor()
        try:
            for statement in statements:
                cursor.execute(statement)
        finally:
            cursor.close()
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
if _is_sqlite_url(DATABASE_URL):
    # SQLite gets a driver-level lock timeout plus the PRAGMA connect hook;
    # WAL is disabled for in-memory databases.
    engine = create_async_engine(
        DATABASE_URL,
        echo=False,
        connect_args={"timeout": _SQLITE_BUSY_TIMEOUT_SECONDS},
    )
    _configure_sqlite_engine(engine.sync_engine, enable_wal=not _is_sqlite_memory_url(DATABASE_URL))
else:
    engine = create_async_engine(DATABASE_URL, echo=False)

# expire_on_commit=False keeps ORM objects readable after commit without refresh.
SessionLocal = async_sessionmaker(engine, expire_on_commit=False, class_=AsyncSession)

_T = TypeVar("_T")
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _ensure_sqlite_dir(url: str) -> None:
|
|
62
|
+
if not (url.startswith("sqlite+aiosqlite:") or url.startswith("sqlite:")):
|
|
63
|
+
return
|
|
64
|
+
|
|
65
|
+
marker = ":///"
|
|
66
|
+
marker_index = url.find(marker)
|
|
67
|
+
if marker_index < 0:
|
|
68
|
+
return
|
|
69
|
+
|
|
70
|
+
# Works for both relative (sqlite+aiosqlite:///./db.sqlite) and absolute
|
|
71
|
+
# paths (sqlite+aiosqlite:////var/lib/app/db.sqlite).
|
|
72
|
+
path = url[marker_index + len(marker) :]
|
|
73
|
+
path = path.partition("?")[0]
|
|
74
|
+
path = path.partition("#")[0]
|
|
75
|
+
|
|
76
|
+
if not path or path == ":memory:":
|
|
77
|
+
return
|
|
78
|
+
|
|
79
|
+
Path(path).expanduser().parent.mkdir(parents=True, exist_ok=True)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
async def _shielded(awaitable: Awaitable[_T]) -> _T:
    """Await *awaitable* inside a shielded scope so cancellation cannot interrupt it."""
    scope = anyio.CancelScope(shield=True)
    with scope:
        return await awaitable
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
async def _safe_rollback(session: AsyncSession) -> None:
    """Roll back the session's open transaction, swallowing every failure.

    Intentionally best-effort: errors (including cancellation) raised during
    teardown must not mask the original exception.
    """
    if session.in_transaction():
        try:
            await _shielded(session.rollback())
        except BaseException:  # deliberate: teardown must never raise
            pass
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
async def _safe_close(session: AsyncSession) -> None:
    """Close the session best-effort; failures during teardown are ignored."""
    try:
        await _shielded(session.close())
    except BaseException:  # deliberate: close errors must not mask the real one
        pass
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
async def get_session() -> AsyncIterator[AsyncSession]:
    """Dependency yielding an AsyncSession with cancellation-safe teardown.

    On any exception from the consumer, the open transaction is rolled back
    (shielded, best-effort) before the exception propagates.  The ``finally``
    block then rolls back anything still open and closes the session, so the
    session is always cleaned up even under task cancellation.
    """
    session = SessionLocal()
    try:
        yield session
    except BaseException:
        # Roll back before re-raising so the failure never leaves a dirty session.
        await _safe_rollback(session)
        raise
    finally:
        # Consumers that committed leave no transaction; anything else is undone.
        if session.in_transaction():
            await _safe_rollback(session)
        await _safe_close(session)
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
async def init_db() -> None:
    """Create tables and apply in-app migrations at startup.

    Migration failures are logged and swallowed unless the
    ``database_migrations_fail_fast`` setting is on, in which case the
    exception propagates and aborts startup.
    """
    # Local import — presumably to avoid a circular import at module load; confirm.
    from app.db.models import Base

    _ensure_sqlite_dir(DATABASE_URL)

    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    async with SessionLocal() as session:
        try:
            updated = await run_migrations(session)
            if updated:
                logger.info("Applied database migrations count=%s", updated)
        except Exception:
            logger.exception("Failed to apply database migrations")
            if get_settings().database_migrations_fail_fast:
                raise
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
async def close_db() -> None:
    """Dispose of the engine (and its connection pool) at shutdown."""
    await engine.dispose()
|
|
@@ -12,8 +12,11 @@ from app.modules.accounts.repository import AccountsRepository
|
|
|
12
12
|
from app.modules.accounts.service import AccountsService
|
|
13
13
|
from app.modules.oauth.service import OauthService
|
|
14
14
|
from app.modules.proxy.service import ProxyService
|
|
15
|
+
from app.modules.proxy.sticky_repository import StickySessionsRepository
|
|
15
16
|
from app.modules.request_logs.repository import RequestLogsRepository
|
|
16
17
|
from app.modules.request_logs.service import RequestLogsService
|
|
18
|
+
from app.modules.settings.repository import SettingsRepository
|
|
19
|
+
from app.modules.settings.service import SettingsService
|
|
17
20
|
from app.modules.usage.repository import UsageRepository
|
|
18
21
|
from app.modules.usage.service import UsageService
|
|
19
22
|
|
|
@@ -49,6 +52,13 @@ class RequestLogsContext:
|
|
|
49
52
|
service: RequestLogsService
|
|
50
53
|
|
|
51
54
|
|
|
55
|
+
@dataclass(slots=True)
class SettingsContext:
    """Per-request bundle: DB session plus the settings repository and service."""

    session: AsyncSession
    repository: SettingsRepository
    service: SettingsService
|
|
60
|
+
|
|
61
|
+
|
|
52
62
|
def get_accounts_context(
|
|
53
63
|
session: AsyncSession = Depends(get_session),
|
|
54
64
|
) -> AccountsContext:
|
|
@@ -104,7 +114,15 @@ def get_proxy_context(
|
|
|
104
114
|
accounts_repository = AccountsRepository(session)
|
|
105
115
|
usage_repository = UsageRepository(session)
|
|
106
116
|
request_logs_repository = RequestLogsRepository(session)
|
|
107
|
-
|
|
117
|
+
sticky_repository = StickySessionsRepository(session)
|
|
118
|
+
settings_repository = SettingsRepository(session)
|
|
119
|
+
service = ProxyService(
|
|
120
|
+
accounts_repository,
|
|
121
|
+
usage_repository,
|
|
122
|
+
request_logs_repository,
|
|
123
|
+
sticky_repository,
|
|
124
|
+
settings_repository,
|
|
125
|
+
)
|
|
108
126
|
return ProxyContext(service=service)
|
|
109
127
|
|
|
110
128
|
|
|
@@ -114,3 +132,11 @@ def get_request_logs_context(
|
|
|
114
132
|
repository = RequestLogsRepository(session)
|
|
115
133
|
service = RequestLogsService(repository)
|
|
116
134
|
return RequestLogsContext(session=session, repository=repository, service=service)
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def get_settings_context(
    session: AsyncSession = Depends(get_session),
) -> SettingsContext:
    """Assemble the settings dependency context for one request."""
    repo = SettingsRepository(session)
    return SettingsContext(
        session=session,
        repository=repo,
        service=SettingsService(repo),
    )
|