codex-lb 0.2.0-py3-none-any.whl → 0.3.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- app/core/auth/__init__.py +10 -0
- app/core/balancer/logic.py +33 -6
- app/core/config/settings.py +2 -0
- app/core/usage/__init__.py +2 -0
- app/core/usage/logs.py +12 -2
- app/core/usage/quota.py +10 -4
- app/core/usage/types.py +3 -2
- app/db/migrations/__init__.py +14 -3
- app/db/migrations/versions/add_accounts_chatgpt_account_id.py +29 -0
- app/db/migrations/versions/add_accounts_reset_at.py +29 -0
- app/db/migrations/versions/add_dashboard_settings.py +31 -0
- app/db/migrations/versions/add_request_logs_reasoning_effort.py +21 -0
- app/db/models.py +33 -0
- app/db/session.py +71 -11
- app/dependencies.py +27 -1
- app/main.py +11 -2
- app/modules/accounts/auth_manager.py +44 -3
- app/modules/accounts/repository.py +14 -6
- app/modules/accounts/service.py +4 -2
- app/modules/oauth/service.py +4 -3
- app/modules/proxy/load_balancer.py +74 -5
- app/modules/proxy/service.py +155 -31
- app/modules/proxy/sticky_repository.py +56 -0
- app/modules/request_logs/repository.py +6 -3
- app/modules/request_logs/schemas.py +2 -0
- app/modules/request_logs/service.py +8 -1
- app/modules/settings/__init__.py +1 -0
- app/modules/settings/api.py +37 -0
- app/modules/settings/repository.py +40 -0
- app/modules/settings/schemas.py +13 -0
- app/modules/settings/service.py +33 -0
- app/modules/shared/schemas.py +16 -2
- app/modules/usage/schemas.py +1 -0
- app/modules/usage/service.py +17 -1
- app/modules/usage/updater.py +36 -7
- app/static/index.css +1024 -319
- app/static/index.html +461 -377
- app/static/index.js +327 -49
- {codex_lb-0.2.0.dist-info → codex_lb-0.3.1.dist-info}/METADATA +33 -7
- {codex_lb-0.2.0.dist-info → codex_lb-0.3.1.dist-info}/RECORD +43 -34
- app/static/7.css +0 -1336
- {codex_lb-0.2.0.dist-info → codex_lb-0.3.1.dist-info}/WHEEL +0 -0
- {codex_lb-0.2.0.dist-info → codex_lb-0.3.1.dist-info}/entry_points.txt +0 -0
- {codex_lb-0.2.0.dist-info → codex_lb-0.3.1.dist-info}/licenses/LICENSE +0 -0
app/modules/proxy/sticky_repository.py
ADDED
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+from sqlalchemy import delete, select
+from sqlalchemy.dialects.postgresql import insert as pg_insert
+from sqlalchemy.dialects.sqlite import insert as sqlite_insert
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.sql import Insert, func
+
+from app.db.models import StickySession
+
+
+class StickySessionsRepository:
+    def __init__(self, session: AsyncSession) -> None:
+        self._session = session
+
+    async def get_account_id(self, key: str) -> str | None:
+        if not key:
+            return None
+        result = await self._session.execute(select(StickySession.account_id).where(StickySession.key == key))
+        return result.scalar_one_or_none()
+
+    async def upsert(self, key: str, account_id: str) -> StickySession:
+        statement = self._build_upsert_statement(key, account_id)
+        await self._session.execute(statement)
+        await self._session.commit()
+        row = await self._session.get(StickySession, key)
+        if row is None:
+            raise RuntimeError(f"StickySession upsert failed for key={key!r}")
+        await self._session.refresh(row)
+        return row
+
+    async def delete(self, key: str) -> bool:
+        if not key:
+            return False
+        result = await self._session.execute(
+            delete(StickySession).where(StickySession.key == key).returning(StickySession.key)
+        )
+        await self._session.commit()
+        return result.scalar_one_or_none() is not None
+
+    def _build_upsert_statement(self, key: str, account_id: str) -> Insert:
+        dialect = self._session.get_bind().dialect.name
+        if dialect == "postgresql":
+            insert_fn = pg_insert
+        elif dialect == "sqlite":
+            insert_fn = sqlite_insert
+        else:
+            raise RuntimeError(f"StickySession upsert unsupported for dialect={dialect!r}")
+        statement = insert_fn(StickySession).values(key=key, account_id=account_id)
+        return statement.on_conflict_do_update(
+            index_elements=[StickySession.key],
+            set_={
+                "account_id": account_id,
+                "updated_at": func.now(),
+            },
+        )

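Note: the repository builds a dialect-specific INSERT ... ON CONFLICT DO UPDATE, so a single upsert() call is atomic on both supported backends (PostgreSQL and SQLite). A minimal usage sketch; the wrapper function and the way the AsyncSession is obtained are assumptions, not part of the package:

from sqlalchemy.ext.asyncio import AsyncSession

from app.modules.proxy.sticky_repository import StickySessionsRepository

async def bind_thread_to_account(session: AsyncSession, key: str, account_id: str) -> None:
    repo = StickySessionsRepository(session)
    row = await repo.upsert(key, account_id)  # insert-or-update in one statement
    assert await repo.get_account_id(key) == row.account_id
    # repo.delete(key) would return True only if a mapping was actually removed
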
app/modules/request_logs/repository.py
CHANGED
@@ -1,8 +1,8 @@
 from __future__ import annotations
 
-import asyncio
 from datetime import datetime
 
+import anyio
 from sqlalchemy import and_, select
 from sqlalchemy.ext.asyncio import AsyncSession
 
@@ -33,6 +33,7 @@ class RequestLogsRepository:
         requested_at: datetime | None = None,
         cached_input_tokens: int | None = None,
         reasoning_tokens: int | None = None,
+        reasoning_effort: str | None = None,
     ) -> RequestLog:
         resolved_request_id = ensure_request_id(request_id)
         log = RequestLog(
@@ -43,6 +44,7 @@ class RequestLogsRepository:
             output_tokens=output_tokens,
             cached_input_tokens=cached_input_tokens,
             reasoning_tokens=reasoning_tokens,
+            reasoning_effort=reasoning_effort,
             latency_ms=latency_ms,
             status=status,
             error_code=error_code,
@@ -95,6 +97,7 @@ async def _safe_rollback(session: AsyncSession) -> None:
     if not session.in_transaction():
         return
     try:
-
-
+        with anyio.CancelScope(shield=True):
+            await session.rollback()
+    except BaseException:
         return

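Note: asyncio is swapped for anyio here, and the rollback in _safe_rollback now runs inside anyio.CancelScope(shield=True), so a cancelled request (for example a client disconnect mid-stream) can no longer interrupt the rollback and leave the session in a dangling transaction. A standalone illustration of what shielding buys, not taken from the package:

import anyio

async def main() -> None:
    with anyio.move_on_after(0.01):            # outer scope is cancelled after 10 ms
        with anyio.CancelScope(shield=True):   # the shielded block still runs to completion
            await anyio.sleep(0.05)
            print("shielded cleanup finished")
    print("outer scope exited")

anyio.run(main)
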
app/modules/request_logs/schemas.py
CHANGED
@@ -17,6 +17,8 @@ class RequestLogEntry(DashboardModel):
     error_code: str | None = None
     error_message: str | None = None
     tokens: int | None = None
+    cached_input_tokens: int | None = None
+    reasoning_effort: str | None = None
     cost_usd: float | None = None
     latency_ms: int | None = None
 

app/modules/request_logs/service.py
CHANGED
@@ -3,7 +3,12 @@ from __future__ import annotations
 from datetime import datetime
 from typing import cast
 
-from app.core.usage.logs import
+from app.core.usage.logs import (
+    RequestLogLike,
+    cached_input_tokens_from_log,
+    cost_from_log,
+    total_tokens_from_log,
+)
 from app.db.models import RequestLog
 from app.modules.request_logs.repository import RequestLogsRepository
 from app.modules.request_logs.schemas import RequestLogEntry
@@ -70,10 +75,12 @@ def _to_entry(log: RequestLog) -> RequestLogEntry:
         account_id=log.account_id,
         request_id=log.request_id,
         model=log.model,
+        reasoning_effort=log.reasoning_effort,
         status=_log_status(log),
         error_code=log.error_code,
         error_message=log.error_message,
         tokens=total_tokens_from_log(log_like),
+        cached_input_tokens=cached_input_tokens_from_log(log_like),
         cost_usd=cost_from_log(log_like, precision=6),
         latency_ms=log.latency_ms,
     )

app/modules/settings/__init__.py
ADDED
@@ -0,0 +1 @@
+from __future__ import annotations

app/modules/settings/api.py
ADDED
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+from fastapi import APIRouter, Body, Depends
+
+from app.dependencies import SettingsContext, get_settings_context
+from app.modules.settings.schemas import DashboardSettingsResponse, DashboardSettingsUpdateRequest
+from app.modules.settings.service import DashboardSettingsData
+
+router = APIRouter(prefix="/api/settings", tags=["dashboard"])
+
+
+@router.get("", response_model=DashboardSettingsResponse)
+async def get_settings(
+    context: SettingsContext = Depends(get_settings_context),
+) -> DashboardSettingsResponse:
+    settings = await context.service.get_settings()
+    return DashboardSettingsResponse(
+        sticky_threads_enabled=settings.sticky_threads_enabled,
+        prefer_earlier_reset_accounts=settings.prefer_earlier_reset_accounts,
+    )
+
+
+@router.put("", response_model=DashboardSettingsResponse)
+async def update_settings(
+    payload: DashboardSettingsUpdateRequest = Body(...),
+    context: SettingsContext = Depends(get_settings_context),
+) -> DashboardSettingsResponse:
+    updated = await context.service.update_settings(
+        DashboardSettingsData(
+            sticky_threads_enabled=payload.sticky_threads_enabled,
+            prefer_earlier_reset_accounts=payload.prefer_earlier_reset_accounts,
+        )
+    )
+    return DashboardSettingsResponse(
+        sticky_threads_enabled=updated.sticky_threads_enabled,
+        prefer_earlier_reset_accounts=updated.prefer_earlier_reset_accounts,
+    )

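Note: the new dashboard settings endpoints expose two boolean flags over GET and PUT /api/settings. Because DashboardModel uses the to_camel alias generator, the wire format is camelCase (snake_case is also accepted via populate_by_name). A hedged client sketch; the base URL, port, and absence of auth on these routes are assumptions:

import asyncio

import httpx

async def toggle_sticky_threads() -> None:
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        current = (await client.get("/api/settings")).json()
        body = {
            "stickyThreadsEnabled": not current["stickyThreadsEnabled"],
            "preferEarlierResetAccounts": current["preferEarlierResetAccounts"],
        }
        updated = (await client.put("/api/settings", json=body)).json()
        print(updated)

asyncio.run(toggle_sticky_threads())
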
app/modules/settings/repository.py
ADDED
@@ -0,0 +1,40 @@
+from __future__ import annotations
+
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from app.db.models import DashboardSettings
+
+_SETTINGS_ID = 1
+
+
+class SettingsRepository:
+    def __init__(self, session: AsyncSession) -> None:
+        self._session = session
+
+    async def get_or_create(self) -> DashboardSettings:
+        existing = await self._session.get(DashboardSettings, _SETTINGS_ID)
+        if existing is not None:
+            return existing
+
+        row = DashboardSettings(
+            id=_SETTINGS_ID,
+            sticky_threads_enabled=False,
+            prefer_earlier_reset_accounts=False,
+        )
+        self._session.add(row)
+        await self._session.commit()
+        await self._session.refresh(row)
+        return row
+
+    async def update(
+        self,
+        *,
+        sticky_threads_enabled: bool,
+        prefer_earlier_reset_accounts: bool,
+    ) -> DashboardSettings:
+        settings = await self.get_or_create()
+        settings.sticky_threads_enabled = sticky_threads_enabled
+        settings.prefer_earlier_reset_accounts = prefer_earlier_reset_accounts
+        await self._session.commit()
+        await self._session.refresh(settings)
+        return settings

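Note: dashboard settings live in a single fixed-id row (_SETTINGS_ID = 1); get_or_create() lazily seeds the defaults and update() always writes through that same row, so no seed migration is needed. A small sketch of direct repository use; how the AsyncSession is obtained is an assumption:

from sqlalchemy.ext.asyncio import AsyncSession

from app.db.models import DashboardSettings
from app.modules.settings.repository import SettingsRepository

async def enable_sticky_threads(session: AsyncSession) -> DashboardSettings:
    repo = SettingsRepository(session)
    current = await repo.get_or_create()  # first call creates the id=1 row with both flags off
    return await repo.update(
        sticky_threads_enabled=True,
        prefer_earlier_reset_accounts=current.prefer_earlier_reset_accounts,
    )
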
app/modules/settings/schemas.py
ADDED
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from app.modules.shared.schemas import DashboardModel
+
+
+class DashboardSettingsResponse(DashboardModel):
+    sticky_threads_enabled: bool
+    prefer_earlier_reset_accounts: bool
+
+
+class DashboardSettingsUpdateRequest(DashboardModel):
+    sticky_threads_enabled: bool
+    prefer_earlier_reset_accounts: bool

app/modules/settings/service.py
ADDED
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+from app.modules.settings.repository import SettingsRepository
+
+
+@dataclass(frozen=True, slots=True)
+class DashboardSettingsData:
+    sticky_threads_enabled: bool
+    prefer_earlier_reset_accounts: bool
+
+
+class SettingsService:
+    def __init__(self, repository: SettingsRepository) -> None:
+        self._repository = repository
+
+    async def get_settings(self) -> DashboardSettingsData:
+        row = await self._repository.get_or_create()
+        return DashboardSettingsData(
+            sticky_threads_enabled=row.sticky_threads_enabled,
+            prefer_earlier_reset_accounts=row.prefer_earlier_reset_accounts,
+        )
+
+    async def update_settings(self, payload: DashboardSettingsData) -> DashboardSettingsData:
+        row = await self._repository.update(
+            sticky_threads_enabled=payload.sticky_threads_enabled,
+            prefer_earlier_reset_accounts=payload.prefer_earlier_reset_accounts,
+        )
+        return DashboardSettingsData(
+            sticky_threads_enabled=row.sticky_threads_enabled,
+            prefer_earlier_reset_accounts=row.prefer_earlier_reset_accounts,
+        )

app/modules/shared/schemas.py
CHANGED
@@ -1,8 +1,22 @@
 from __future__ import annotations
 
-from
+from datetime import datetime
+
+from pydantic import BaseModel, ConfigDict, field_serializer
 from pydantic.alias_generators import to_camel
 
 
 class DashboardModel(BaseModel):
-    model_config = ConfigDict(
+    model_config = ConfigDict(
+        alias_generator=to_camel,
+        populate_by_name=True,
+        ser_json_timedelta="iso8601",
+    )
+
+    @field_serializer("*", when_used="json")
+    def serialize_datetime_as_utc(value, _info):
+        if isinstance(value, datetime):
+            if value.tzinfo is None:
+                return value.isoformat() + "Z"
+            return value.isoformat().replace("+00:00", "Z")
+        return value

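Note: DashboardModel now serializes every datetime with a trailing Z (naive values are treated as UTC) and keeps the camelCase aliases. A quick illustration with a hypothetical model; the JSON shown is the expected result, roughly, not output captured from the package:

from datetime import datetime, timezone

from app.modules.shared.schemas import DashboardModel

class ExampleEntry(DashboardModel):  # hypothetical model, for demonstration only
    requested_at: datetime
    cost_usd: float | None = None

entry = ExampleEntry(requested_at=datetime(2024, 5, 1, 12, 0, tzinfo=timezone.utc), cost_usd=0.25)
print(entry.model_dump_json(by_alias=True))
# {"requestedAt":"2024-05-01T12:00:00Z","costUsd":0.25}
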
app/modules/usage/schemas.py
CHANGED
@@ -30,6 +30,7 @@ class UsageCost(DashboardModel):
 class UsageMetrics(DashboardModel):
     requests_7d: int | None = Field(default=None, alias="requests7d")
     tokens_secondary_window: int | None = None
+    cached_tokens_secondary_window: int | None = None
     error_rate_7d: float | None = Field(default=None, alias="errorRate7d")
     top_error: str | None = None
 

app/modules/usage/service.py
CHANGED
@@ -4,7 +4,13 @@ from datetime import timedelta
 from typing import cast
 
 from app.core import usage as usage_core
-from app.core.usage.logs import
+from app.core.usage.logs import (
+    RequestLogLike,
+    cached_input_tokens_from_log,
+    cost_from_log,
+    total_tokens_from_log,
+    usage_tokens_from_log,
+)
 from app.core.usage.pricing import CostItem, calculate_costs
 from app.core.usage.types import (
     UsageCostSummary,
@@ -181,9 +187,11 @@ def _usage_metrics(logs_secondary: list[RequestLog]) -> UsageMetricsSummary:
     error_rate = len(error_logs) / total_requests
     top_error = _top_error_code(error_logs)
     tokens_secondary = _sum_tokens(logs_secondary)
+    cached_tokens_secondary = _sum_cached_input_tokens(logs_secondary)
     return UsageMetricsSummary(
         requests_7d=total_requests,
         tokens_secondary_window=tokens_secondary,
+        cached_tokens_secondary_window=cached_tokens_secondary,
         error_rate_7d=error_rate,
         top_error=top_error,
     )
@@ -196,6 +204,13 @@ def _sum_tokens(logs: list[RequestLog]) -> int:
     return total
 
 
+def _sum_cached_input_tokens(logs: list[RequestLog]) -> int:
+    total = 0
+    for log in logs:
+        total += cached_input_tokens_from_log(cast(RequestLogLike, log)) or 0
+    return total
+
+
 def _top_error_code(logs: list[RequestLog]) -> str | None:
     counts: dict[str, int] = {}
     for log in logs:
@@ -242,6 +257,7 @@ def _metrics_summary_to_model(metrics: UsageMetricsSummary) -> UsageMetrics:
     return UsageMetrics(
         requests_7d=metrics.requests_7d,
         tokens_secondary_window=metrics.tokens_secondary_window,
+        cached_tokens_secondary_window=metrics.cached_tokens_secondary_window,
        error_rate_7d=metrics.error_rate_7d,
         top_error=metrics.top_error,
     )

app/modules/usage/updater.py
CHANGED
@@ -2,7 +2,9 @@ from __future__ import annotations
 
 import logging
 import math
-from
+from collections import Counter
+from datetime import datetime
+from typing import Mapping, Protocol
 
 from app.core.auth.refresh import RefreshError
 from app.core.clients.usage import UsageFetchError, fetch_usage
@@ -14,15 +16,31 @@ from app.core.utils.time import utcnow
 from app.db.models import Account, AccountStatus, UsageHistory
 from app.modules.accounts.auth_manager import AuthManager
 from app.modules.accounts.repository import AccountsRepository
-from app.modules.usage.repository import UsageRepository
 
 logger = logging.getLogger(__name__)
 
 
+class UsageRepositoryPort(Protocol):
+    async def add_entry(
+        self,
+        account_id: str,
+        used_percent: float,
+        input_tokens: int | None = None,
+        output_tokens: int | None = None,
+        recorded_at: datetime | None = None,
+        window: str | None = None,
+        reset_at: int | None = None,
+        window_minutes: int | None = None,
+        credits_has: bool | None = None,
+        credits_unlimited: bool | None = None,
+        credits_balance: float | None = None,
+    ) -> UsageHistory | None: ...
+
+
 class UsageUpdater:
     def __init__(
         self,
-        usage_repo:
+        usage_repo: UsageRepositoryPort,
         accounts_repo: AccountsRepository | None = None,
     ) -> None:
         self._usage_repo = usage_repo
@@ -38,6 +56,7 @@ class UsageUpdater:
         if not settings.usage_refresh_enabled:
             return
 
+        shared_chatgpt_account_ids = _shared_chatgpt_account_ids(accounts)
         now = utcnow()
         interval = settings.usage_refresh_interval_seconds
         for account in accounts:
@@ -46,11 +65,16 @@ class UsageUpdater:
             latest = latest_usage.get(account.id)
             if latest and (now - latest.recorded_at).total_seconds() < interval:
                 continue
+            usage_account_id = (
+                None
+                if account.chatgpt_account_id and account.chatgpt_account_id in shared_chatgpt_account_ids
+                else account.chatgpt_account_id
+            )
             # NOTE: AsyncSession is not safe for concurrent use. Run sequentially
             # within the request-scoped session to avoid PK collisions and
             # flush-time warnings (SAWarning: Session.add during flush).
             try:
-                await self._refresh_account(account)
+                await self._refresh_account(account, usage_account_id=usage_account_id)
             except Exception as exc:
                 logger.warning(
                     "Usage refresh failed account_id=%s request_id=%s error=%s",
@@ -62,12 +86,12 @@ class UsageUpdater:
                # swallow per-account failures so the whole refresh loop keeps going
                 continue
 
-    async def _refresh_account(self, account: Account) -> None:
+    async def _refresh_account(self, account: Account, *, usage_account_id: str | None) -> None:
         access_token = self._encryptor.decrypt(account.access_token_encrypted)
         try:
             payload = await fetch_usage(
                 access_token=access_token,
-                account_id=
+                account_id=usage_account_id,
             )
         except UsageFetchError as exc:
             if exc.status_code != 401 or not self._auth_manager:
@@ -80,7 +104,7 @@ class UsageUpdater:
             try:
                 payload = await fetch_usage(
                     access_token=access_token,
-                    account_id=
+                    account_id=usage_account_id,
                 )
             except UsageFetchError:
                 return
@@ -145,3 +169,8 @@ def _window_minutes(limit_seconds: int | None) -> int | None:
     if not limit_seconds or limit_seconds <= 0:
         return None
     return max(1, math.ceil(limit_seconds / 60))
+
+
+def _shared_chatgpt_account_ids(accounts: list[Account]) -> set[str]:
+    counts = Counter(account.chatgpt_account_id for account in accounts if account.chatgpt_account_id)
+    return {account_id for account_id, count in counts.items() if count > 1}

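Note: when several configured accounts share the same upstream chatgpt_account_id, the updater now omits the explicit account_id on the usage fetch for those accounts instead of pinning them all to one id. The detection is a plain Counter over the configured accounts; a standalone illustration (FakeAccount is only a stand-in for the real Account model):

from __future__ import annotations

from collections import Counter
from dataclasses import dataclass

@dataclass
class FakeAccount:  # stand-in for app.db.models.Account, example only
    id: str
    chatgpt_account_id: str | None

def shared_ids(accounts: list[FakeAccount]) -> set[str]:
    counts = Counter(a.chatgpt_account_id for a in accounts if a.chatgpt_account_id)
    return {acct_id for acct_id, n in counts.items() if n > 1}

accounts = [
    FakeAccount("a", "team-1"),
    FakeAccount("b", "team-1"),   # same upstream ChatGPT account as "a"
    FakeAccount("c", "personal"),
    FakeAccount("d", None),
]
print(shared_ids(accounts))  # {'team-1'}
# "a" and "b" would be fetched without an explicit account_id; "c" keeps its own.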