arize-phoenix 11.4.0__py3-none-any.whl → 11.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of arize-phoenix has been flagged as potentially problematic.

Files changed (39)
  1. {arize_phoenix-11.4.0.dist-info → arize_phoenix-11.5.0.dist-info}/METADATA +2 -2
  2. {arize_phoenix-11.4.0.dist-info → arize_phoenix-11.5.0.dist-info}/RECORD +39 -37
  3. phoenix/config.py +51 -2
  4. phoenix/server/api/auth.py +1 -1
  5. phoenix/server/api/queries.py +34 -22
  6. phoenix/server/api/routers/v1/annotation_configs.py +4 -1
  7. phoenix/server/api/routers/v1/datasets.py +3 -1
  8. phoenix/server/api/routers/v1/evaluations.py +3 -1
  9. phoenix/server/api/routers/v1/experiment_runs.py +3 -1
  10. phoenix/server/api/routers/v1/experiments.py +3 -1
  11. phoenix/server/api/routers/v1/projects.py +4 -1
  12. phoenix/server/api/routers/v1/prompts.py +4 -1
  13. phoenix/server/api/routers/v1/spans.py +4 -1
  14. phoenix/server/api/routers/v1/traces.py +4 -1
  15. phoenix/server/api/routers/v1/users.py +2 -2
  16. phoenix/server/app.py +41 -2
  17. phoenix/server/authorization.py +9 -0
  18. phoenix/server/bearer_auth.py +18 -15
  19. phoenix/server/daemons/db_disk_usage_monitor.py +209 -0
  20. phoenix/server/email/sender.py +25 -0
  21. phoenix/server/email/templates/db_disk_usage_notification.html +16 -0
  22. phoenix/server/email/types.py +11 -0
  23. phoenix/server/grpc_server.py +3 -3
  24. phoenix/server/prometheus.py +22 -0
  25. phoenix/server/static/.vite/manifest.json +44 -44
  26. phoenix/server/static/assets/{components-CVcMbu2U.js → components-Bwf6zNbg.js} +175 -176
  27. phoenix/server/static/assets/{index-Dz7I-Hpn.js → index-Bfg9uQ43.js} +2 -2
  28. phoenix/server/static/assets/{pages-QK2o2V7x.js → pages-BCR8hW_l.js} +426 -421
  29. phoenix/server/static/assets/{vendor-pg5m6BWE.js → vendor-DRWIRkSJ.js} +1 -1
  30. phoenix/server/static/assets/{vendor-arizeai-BwMsgSAG.js → vendor-arizeai-DUhQaeau.js} +1 -1
  31. phoenix/server/static/assets/{vendor-codemirror-BwSDEu2g.js → vendor-codemirror-D_6Q6Auv.js} +1 -1
  32. phoenix/server/static/assets/{vendor-recharts-SW3HwAtG.js → vendor-recharts-BNBwj7vz.js} +1 -1
  33. phoenix/server/static/assets/{vendor-shiki-BsdYoDvs.js → vendor-shiki-k1qj_XjP.js} +1 -1
  34. phoenix/server/types.py +7 -0
  35. phoenix/version.py +1 -1
  36. {arize_phoenix-11.4.0.dist-info → arize_phoenix-11.5.0.dist-info}/WHEEL +0 -0
  37. {arize_phoenix-11.4.0.dist-info → arize_phoenix-11.5.0.dist-info}/entry_points.txt +0 -0
  38. {arize_phoenix-11.4.0.dist-info → arize_phoenix-11.5.0.dist-info}/licenses/IP_NOTICE +0 -0
  39. {arize_phoenix-11.4.0.dist-info → arize_phoenix-11.5.0.dist-info}/licenses/LICENSE +0 -0
phoenix/server/api/routers/v1/users.py CHANGED
@@ -43,7 +43,7 @@ from phoenix.server.api.routers.v1.utils import (
      add_errors_to_responses,
  )
  from phoenix.server.api.types.node import from_global_id_with_expected_type
- from phoenix.server.authorization import require_admin
+ from phoenix.server.authorization import is_not_locked, require_admin

  logger = logging.getLogger(__name__)

@@ -194,7 +194,7 @@ async def list_users(
              HTTP_422_UNPROCESSABLE_ENTITY,
          ]
      ),
-     dependencies=[Depends(require_admin)],
+     dependencies=[Depends(require_admin), Depends(is_not_locked)],
      response_model_by_alias=True,
      response_model_exclude_unset=True,
      response_model_exclude_defaults=True,
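Note on the pattern above: is_not_locked (defined in phoenix/server/authorization.py later in this diff) is an ordinary FastAPI dependency, so any route that writes to the database can be gated by listing it in dependencies=[...]. Below is a minimal, self-contained sketch of the same pattern; the toy app, the SimpleNamespace stand-in for the session factory, and the /v1/users handler are illustrative assumptions, not Phoenix code.

from types import SimpleNamespace

from fastapi import Depends, FastAPI, HTTPException, Request, status

app = FastAPI()
# Stand-in for the session factory the real server stores on app.state;
# the disk-usage monitor daemon flips this flag when the blocking threshold is hit.
app.state.db = SimpleNamespace(should_not_insert_or_update=False)


def is_not_locked(request: Request) -> None:
    # Same check as phoenix.server.authorization.is_not_locked
    if request.app.state.db.should_not_insert_or_update:
        raise HTTPException(
            status_code=status.HTTP_507_INSUFFICIENT_STORAGE,
            detail="Operations that insert or update database records are currently not allowed.",
        )


@app.post("/v1/users", dependencies=[Depends(is_not_locked)])  # hypothetical route
async def create_user() -> dict[str, str]:
    return {"status": "created"}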
phoenix/server/app.py CHANGED
@@ -16,17 +16,20 @@ from typing import (
      Any,
      NamedTuple,
      Optional,
+     Protocol,
      TypedDict,
      Union,
      cast,
  )
  from urllib.parse import urlparse

+ import grpc
  import strawberry
  from fastapi import APIRouter, Depends, FastAPI
  from fastapi.middleware.cors import CORSMiddleware
  from fastapi.utils import is_body_allowed_for_status_code
  from grpc.aio import ServerInterceptor
+ from grpc_interceptor import AsyncServerInterceptor
  from sqlalchemy import select
  from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker
  from starlette.datastructures import URL, Secret
@@ -44,7 +47,7 @@ from starlette.types import Scope, StatefulLifespan
  from strawberry.extensions import SchemaExtension
  from strawberry.fastapi import GraphQLRouter
  from strawberry.subscriptions import GRAPHQL_TRANSPORT_WS_PROTOCOL
- from typing_extensions import TypeAlias
+ from typing_extensions import TypeAlias, override

  import phoenix.trace.v1 as pb
  from phoenix.config import (
@@ -134,6 +137,7 @@ from phoenix.server.api.routers import (
  from phoenix.server.api.routers.v1 import REST_API_VERSION
  from phoenix.server.api.schema import build_graphql_schema
  from phoenix.server.bearer_auth import BearerTokenAuthBackend, is_authenticated
+ from phoenix.server.daemons.db_disk_usage_monitor import DbDiskUsageMonitor
  from phoenix.server.daemons.generative_model_store import GenerativeModelStore
  from phoenix.server.daemons.span_cost_calculator import SpanCostCalculator
  from phoenix.server.dml_event import DmlEvent
@@ -523,6 +527,7 @@ def _lifespan(
      trace_data_sweeper: Optional[TraceDataSweeper],
      span_cost_calculator: SpanCostCalculator,
      generative_model_store: GenerativeModelStore,
+     db_disk_usage_monitor: DbDiskUsageMonitor,
      token_store: Optional[TokenStore] = None,
      tracer_provider: Optional["TracerProvider"] = None,
      enable_prometheus: bool = False,
@@ -530,6 +535,7 @@ def _lifespan(
      shutdown_callbacks: Iterable[_Callback] = (),
      read_only: bool = False,
      scaffolder_config: Optional[ScaffolderConfig] = None,
+     grpc_interceptors: Iterable[AsyncServerInterceptor] = (),
  ) -> StatefulLifespan[FastAPI]:
      @contextlib.asynccontextmanager
      async def lifespan(_: FastAPI) -> AsyncIterator[dict[str, Any]]:
@@ -551,7 +557,7 @@ def _lifespan(
              tracer_provider=tracer_provider,
              enable_prometheus=enable_prometheus,
              token_store=token_store,
-             interceptors=user_grpc_interceptors(),
+             interceptors=user_grpc_interceptors() + list(grpc_interceptors),
          )
          await stack.enter_async_context(grpc_server)
          await stack.enter_async_context(dml_event_handler)
@@ -559,6 +565,7 @@ def _lifespan(
          await stack.enter_async_context(trace_data_sweeper)
          await stack.enter_async_context(span_cost_calculator)
          await stack.enter_async_context(generative_model_store)
+         await stack.enter_async_context(db_disk_usage_monitor)
          if scaffolder_config:
              scaffolder = Scaffolder(
                  config=scaffolder_config,
@@ -826,6 +833,34 @@ async def plain_text_http_exception_handler(request: Request, exc: HTTPException
      return PlainTextResponse(str(exc.detail), status_code=exc.status_code, headers=headers)


+ class _HasDbStatus(Protocol):
+     @property
+     def should_not_insert_or_update(self) -> bool: ...
+
+
+ class DbDiskUsageInterceptor(AsyncServerInterceptor):
+     def __init__(self, db: _HasDbStatus) -> None:
+         self._db = db
+
+     @override
+     async def intercept(
+         self,
+         method: Callable[[Any, grpc.aio.ServicerContext], Awaitable[Any]],
+         request_or_iterator: Any,
+         context: grpc.aio.ServicerContext,
+         method_name: str,
+     ) -> Any:
+         if (
+             method_name.endswith("trace.v1.TraceService/Export")
+             and self._db.should_not_insert_or_update
+         ):
+             await context.abort(
+                 grpc.StatusCode.RESOURCE_EXHAUSTED,
+                 "Database disk usage threshold exceeded",
+             )
+         return await method(request_or_iterator, context)
+
+
  def create_app(
      db: DbSessionFactory,
      export_path: Path,
@@ -971,6 +1006,8 @@ def create_app(
          from phoenix.server.prometheus import PrometheusMiddleware

          middlewares.append(Middleware(PrometheusMiddleware))
+     grpc_interceptors: list[AsyncServerInterceptor] = []
+     grpc_interceptors.append(DbDiskUsageInterceptor(db))
      app = FastAPI(
          title="Arize-Phoenix REST API",
          version=REST_API_VERSION,
@@ -982,6 +1019,8 @@ def create_app(
          trace_data_sweeper=trace_data_sweeper,
          span_cost_calculator=span_cost_calculator,
          generative_model_store=generative_model_store,
+         db_disk_usage_monitor=DbDiskUsageMonitor(db, email_sender),
+         grpc_interceptors=grpc_interceptors,
          token_store=token_store,
          tracer_provider=tracer_provider,
          enable_prometheus=enable_prometheus,
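For context on how the new grpc_interceptors plumbing reaches the wire: grpc_interceptor's AsyncServerInterceptor instances are ultimately handed to the grpc.aio server, which is where the RESOURCE_EXHAUSTED abort takes effect. A rough, self-contained sketch under stated assumptions (a toy interceptor, port 4317, and no registered servicers) is:

import asyncio

import grpc
from grpc_interceptor import AsyncServerInterceptor


class RejectWhenFullInterceptor(AsyncServerInterceptor):
    # Toy stand-in for DbDiskUsageInterceptor; db_is_full is a hypothetical flag.
    def __init__(self, db_is_full: bool) -> None:
        self._db_is_full = db_is_full

    async def intercept(self, method, request_or_iterator, context, method_name):
        if method_name.endswith("trace.v1.TraceService/Export") and self._db_is_full:
            await context.abort(
                grpc.StatusCode.RESOURCE_EXHAUSTED,
                "Database disk usage threshold exceeded",
            )
        return await method(request_or_iterator, context)


async def serve() -> None:
    # A real server would also register the OTLP trace servicer here.
    server = grpc.aio.server(interceptors=[RejectWhenFullInterceptor(db_is_full=False)])
    server.add_insecure_port("[::]:4317")  # assumed port
    await server.start()
    await server.wait_for_termination()


if __name__ == "__main__":
    asyncio.run(serve())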
phoenix/server/authorization.py CHANGED
@@ -51,3 +51,12 @@ def require_admin(request: Request) -> None:
              status_code=fastapi_status.HTTP_403_FORBIDDEN,
              detail="Only admin or system users can perform this action.",
          )
+
+
+ def is_not_locked(request: Request) -> None:
+     if request.app.state.db.should_not_insert_or_update:
+         raise HTTPException(
+             status_code=fastapi_status.HTTP_507_INSUFFICIENT_STORAGE,
+             detail="Operations that insert or update database "
+             "records are currently not allowed.",
+         )
phoenix/server/bearer_auth.py CHANGED
@@ -7,10 +7,10 @@ from typing import Any, Optional, cast
  import grpc
  from fastapi import HTTPException, Request, WebSocket, WebSocketException
  from grpc_interceptor import AsyncServerInterceptor
- from grpc_interceptor.exceptions import Unauthenticated
  from starlette.authentication import AuthCredentials, AuthenticationBackend, BaseUser
  from starlette.requests import HTTPConnection
  from starlette.status import HTTP_401_UNAUTHORIZED
+ from typing_extensions import override

  from phoenix import config
  from phoenix.auth import (
@@ -100,16 +100,19 @@ class PhoenixSystemUser(PhoenixUser):


  class ApiKeyInterceptor(HasTokenStore, AsyncServerInterceptor):
+     @override
      async def intercept(
          self,
-         method: Callable[[Any, grpc.ServicerContext], Awaitable[Any]],
+         method: Callable[[Any, grpc.aio.ServicerContext], Awaitable[Any]],
          request_or_iterator: Any,
-         context: grpc.ServicerContext,
+         context: grpc.aio.ServicerContext,
          method_name: str,
      ) -> Any:
-         for datum in context.invocation_metadata():
-             if datum.key.lower() == "authorization":
-                 scheme, _, token = datum.value.partition(" ")
+         for key, value in context.invocation_metadata() or ():
+             if key.lower() == "authorization":
+                 if isinstance(value, bytes):
+                     value = value.decode("utf-8")
+                 scheme, _, token = value.partition(" ")
                  if scheme.lower() != "bearer" or not token:
                      break
                  if (
@@ -119,16 +122,16 @@ class ApiKeyInterceptor(HasTokenStore, AsyncServerInterceptor):
                  ):
                      return await method(request_or_iterator, context)
                  claims = await self._token_store.read(Token(token))
-                 if not (isinstance(claims, UserClaimSet) and isinstance(claims.subject, UserId)):
+                 if (
+                     not (
+                         isinstance(claims, (ApiKeyClaims, AccessTokenClaims))
+                         and isinstance(claims.subject, UserId)
+                     )
+                     or claims.status is not ClaimSetStatus.VALID
+                 ):
                      break
-                 if not isinstance(claims, (ApiKeyClaims, AccessTokenClaims)):
-                     raise Unauthenticated(details="Invalid token")
-                 if claims.status is ClaimSetStatus.EXPIRED:
-                     raise Unauthenticated(details="Expired token")
-                 if claims.status is ClaimSetStatus.VALID:
-                     return await method(request_or_iterator, context)
-                 raise Unauthenticated()
-         raise Unauthenticated()
+                 return await method(request_or_iterator, context)
+         await context.abort(grpc.StatusCode.UNAUTHENTICATED)


  async def is_authenticated(
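From a client's point of view, ApiKeyInterceptor only inspects the authorization entry of the invocation metadata and expects a Bearer scheme. A sketch of a matching call, assuming the opentelemetry-proto generated stubs are installed and the collector listens on localhost:4317 (both assumptions, not taken from this diff):

import asyncio

import grpc
from opentelemetry.proto.collector.trace.v1 import trace_service_pb2, trace_service_pb2_grpc


async def export_with_api_key(api_key: str) -> None:
    async with grpc.aio.insecure_channel("localhost:4317") as channel:
        stub = trace_service_pb2_grpc.TraceServiceStub(channel)
        await stub.Export(
            trace_service_pb2.ExportTraceServiceRequest(),  # empty request, just to show auth
            metadata=(("authorization", f"Bearer {api_key}"),),  # read by ApiKeyInterceptor
        )


if __name__ == "__main__":
    asyncio.run(export_with_api_key("my-api-key"))  # placeholder key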
phoenix/server/daemons/db_disk_usage_monitor.py ADDED
@@ -0,0 +1,209 @@
+ from __future__ import annotations
+
+ import logging
+ from asyncio import sleep
+ from datetime import datetime, timedelta, timezone
+ from typing import Optional
+
+ import sqlalchemy as sa
+ from email_validator import EmailNotValidError, validate_email
+ from sqlalchemy import text
+ from typing_extensions import assert_never
+
+ from phoenix.config import (
+     get_env_database_allocated_storage_capacity_gibibytes,
+     get_env_database_usage_email_warning_threshold_percentage,
+     get_env_database_usage_insertion_blocking_threshold_percentage,
+ )
+ from phoenix.db import models
+ from phoenix.db.helpers import SupportedSQLDialect
+ from phoenix.server.email.types import DbUsageWarningEmailSender
+ from phoenix.server.prometheus import (
+     DB_DISK_USAGE_BYTES,
+     DB_DISK_USAGE_RATIO,
+     DB_DISK_USAGE_WARNING_EMAIL_ERRORS,
+     DB_DISK_USAGE_WARNING_EMAILS_SENT,
+     DB_INSERTIONS_BLOCKED,
+ )
+ from phoenix.server.types import DaemonTask, DbSessionFactory
+
+ logger = logging.getLogger(__name__)
+
+ _SLEEP_SECONDS = 60
+ _EMAIL_FREQUENCY_HOURS = 24
+ _BYTES_PER_GIBIBYTE = 1024**3
+
+
+ class DbDiskUsageMonitor(DaemonTask):
+     """
+     Monitors database disk space usage and triggers warnings/blocking when thresholds are exceeded.
+
+     This daemon:
+     - Periodically checks current database size
+     - Compares usage against configured thresholds
+     - Sends warning emails to admins when warning threshold is reached
+     - Toggles insertion blocking when blocking threshold is reached
+     """
+
+     def __init__(
+         self,
+         db: DbSessionFactory,
+         email_sender: Optional[DbUsageWarningEmailSender] = None,
+     ) -> None:
+         super().__init__()
+         self._db = db
+         self._email_sender = email_sender
+         # Tracks last email send time per admin email address to prevent spam
+         self._last_email_sent: dict[str, datetime] = {}
+
+     @property
+     def _is_disabled(self) -> bool:
+         return not bool(
+             get_env_database_allocated_storage_capacity_gibibytes()
+             and (
+                 get_env_database_usage_email_warning_threshold_percentage()
+                 or get_env_database_usage_insertion_blocking_threshold_percentage()
+             )
+         )
+
+     async def _run(self) -> None:
+         if self._is_disabled:
+             return
+
+         while self._running:
+             try:
+                 current_usage_bytes = await self._check_disk_usage_bytes()
+             except Exception:
+                 logger.exception("Failed to check disk space")
+             else:
+                 DB_DISK_USAGE_BYTES.set(current_usage_bytes)
+                 current_usage_gibibytes = current_usage_bytes / _BYTES_PER_GIBIBYTE
+                 try:
+                     await self._check_thresholds(current_usage_gibibytes)
+                 except Exception:
+                     logger.exception("Failed to check database usage thresholds")
+             await sleep(_SLEEP_SECONDS)
+
+     async def _check_disk_usage_bytes(self) -> float:
+         if self._db.dialect is SupportedSQLDialect.SQLITE:
+             async with self._db() as session:
+                 page_count = await session.scalar(text("PRAGMA page_count;"))
+                 freelist_count = await session.scalar(text("PRAGMA freelist_count;"))
+                 page_size = await session.scalar(text("PRAGMA page_size;"))
+                 current_usage_bytes = (page_count - freelist_count) * page_size
+         elif self._db.dialect is SupportedSQLDialect.POSTGRESQL:
+             async with self._db() as session:
+                 current_usage_bytes = await session.scalar(
+                     text("SELECT pg_database_size(current_database());")
+                 )
+         else:
+             assert_never(self._db.dialect)
+         if not isinstance(current_usage_bytes, (int, float)):
+             raise TypeError(f"Expected int or float, got {type(current_usage_bytes)}")
+         return float(current_usage_bytes)
+
+     async def _check_thresholds(self, current_usage_gibibytes: float) -> None:
+         allocated_capacity_gibibytes = get_env_database_allocated_storage_capacity_gibibytes()
+         if not allocated_capacity_gibibytes:
+             return
+
+         used_ratio = current_usage_gibibytes / allocated_capacity_gibibytes
+         DB_DISK_USAGE_RATIO.set(used_ratio)
+         used_percentage = used_ratio * 100
+
+         # Check insertion blocking threshold
+         if (
+             insertion_blocking_threshold_percentage
+             := get_env_database_usage_insertion_blocking_threshold_percentage()
+         ):
+             should_not_insert_or_update = used_percentage > insertion_blocking_threshold_percentage
+             self._db.should_not_insert_or_update = should_not_insert_or_update
+             DB_INSERTIONS_BLOCKED.set(int(should_not_insert_or_update))
+
+         # Check warning email threshold
+         if (
+             notification_threshold_percentage
+             := get_env_database_usage_email_warning_threshold_percentage()
+         ):
+             if used_percentage > notification_threshold_percentage:
+                 await self._send_warning_emails(
+                     used_percentage,
+                     allocated_capacity_gibibytes,
+                     notification_threshold_percentage,
+                 )
+
+     async def _send_warning_emails(
+         self,
+         used_percentage: float,
+         allocated_capacity_gibibytes: float,
+         notification_threshold_percentage: float,
+     ) -> None:
+         if not self._email_sender:
+             return
+
+         current_usage_gibibytes = used_percentage / 100 * allocated_capacity_gibibytes
+         stmt = (
+             sa.select(models.User.email)
+             .join(models.UserRole)
+             .where(models.UserRole.name == "ADMIN")
+         )
+
+         try:
+             async with self._db() as session:
+                 admin_emails = (await session.scalars(stmt)).all()
+         except Exception:
+             logger.exception(
+                 "Failed to fetch admin emails from database, "
+                 "skipping database usage warning emails"
+             )
+             return
+
+         if not admin_emails:
+             return
+
+         # Validate email addresses
+         valid_emails: list[str] = []
+
+         for email in admin_emails:
+             try:
+                 normalized_email = validate_email(email, check_deliverability=False).normalized
+             except EmailNotValidError:
+                 pass
+             else:
+                 valid_emails.append(normalized_email)
+
+         if not valid_emails:
+             return
+
+         self._last_email_sent = {
+             email: timestamp
+             for email, timestamp in self._last_email_sent.items()
+             if email in valid_emails
+         }
+
+         now = datetime.now(timezone.utc)
+         emails_sent = 0
+         send_attempts = 0
+
+         for email in valid_emails:
+             if email in self._last_email_sent and now - self._last_email_sent[email] < timedelta(
+                 hours=_EMAIL_FREQUENCY_HOURS
+             ):
+                 continue
+             send_attempts += 1
+             try:
+                 await self._email_sender.send_db_usage_warning_email(
+                     email=email,
+                     current_usage_gibibytes=current_usage_gibibytes,
+                     allocated_storage_gibibytes=allocated_capacity_gibibytes,
+                     notification_threshold_percentage=notification_threshold_percentage,
+                 )
+             except Exception:
+                 logger.exception(f"Failed to send database usage warning email to {email}")
+                 # Count email send errors
+                 DB_DISK_USAGE_WARNING_EMAIL_ERRORS.inc()
+             else:
+                 self._last_email_sent[email] = now
+                 emails_sent += 1
+                 # Count successful warning email sends
+                 DB_DISK_USAGE_WARNING_EMAILS_SENT.inc()
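To make the threshold arithmetic in _check_thresholds concrete, here is a worked example with made-up numbers (a 10 GiB allocation, 8.6 GiB in use, an 80% warning threshold, and a 90% blocking threshold; none of these values are defaults taken from this release):

allocated_capacity_gibibytes = 10.0
current_usage_gibibytes = 8.6
warning_threshold_percentage = 80.0
blocking_threshold_percentage = 90.0

used_ratio = current_usage_gibibytes / allocated_capacity_gibibytes  # ~0.86
used_percentage = used_ratio * 100  # ~86

send_warning_email = used_percentage > warning_threshold_percentage  # True: 86 > 80
should_block_writes = used_percentage > blocking_threshold_percentage  # False: 86 <= 90
print(send_warning_email, should_block_writes)  # True False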
phoenix/server/email/sender.py CHANGED
@@ -81,6 +81,31 @@ class SimpleEmailSender:

          await to_thread.run_sync(self._send_email, msg)

+     async def send_db_usage_warning_email(
+         self,
+         email: str,
+         current_usage_gibibytes: float,
+         allocated_storage_gibibytes: float,
+         notification_threshold_percentage: float,
+     ) -> None:
+         subject = "[Phoenix] Database Usage Threshold Exceeded"
+         template_name = "db_disk_usage_notification.html"
+
+         template = self.env.get_template(template_name)
+         html_content = template.render(
+             current_usage_gibibytes=current_usage_gibibytes,
+             allocated_storage_gibibytes=allocated_storage_gibibytes,
+             notification_threshold_percentage=notification_threshold_percentage,
+         )
+
+         msg = EmailMessage()
+         msg["Subject"] = subject
+         msg["From"] = self.sender_email
+         msg["To"] = email
+         msg.set_content(html_content, subtype="html")
+
+         await to_thread.run_sync(self._send_email, msg)
+
      def _send_email(self, msg: EmailMessage) -> None:
          context: ssl.SSLContext
          if self.validate_certs:
phoenix/server/email/templates/db_disk_usage_notification.html ADDED
@@ -0,0 +1,16 @@
+ <!DOCTYPE html>
+ <html>
+   <head>
+     <meta charset="UTF-8" />
+     <title>Database Usage Notification</title>
+   </head>
+   <body>
+     <h1>Database Usage Notification</h1>
+     <p>Your Phoenix database usage has exceeded the notification threshold.</p>
+     <p><strong>Current Usage:</strong> {{ current_usage_gibibytes|round(1) }} GiB</p>
+     <p><strong>Allocated Storage:</strong> {{ allocated_storage_gibibytes|round(1) }} GiB</p>
+     <p><strong>Usage Percentage:</strong> {{ ((current_usage_gibibytes / allocated_storage_gibibytes) * 100)|round(1) }}%</p>
+     <p><strong>Notification Threshold:</strong> {{ notification_threshold_percentage }}%</p>
+     <p>Please consider removing old data or increasing your storage allocation to prevent interruption.</p>
+   </body>
+ </html>
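The sender renders this file through its Jinja environment (self.env.get_template above). A small sketch for previewing the rendered email outside the server, assuming jinja2 is installed and a local copy of the template sits at the path shown (the loader path is an assumption):

from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader("phoenix/server/email/templates"))
template = env.get_template("db_disk_usage_notification.html")
html = template.render(
    current_usage_gibibytes=8.6,
    allocated_storage_gibibytes=10.0,
    notification_threshold_percentage=80,
)
print(html)  # "Current Usage: 8.6 GiB", "Usage Percentage: 86.0%", etc.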
phoenix/server/email/types.py CHANGED
@@ -19,8 +19,19 @@ class PasswordResetEmailSender(Protocol):
      ) -> None: ...


+ class DbUsageWarningEmailSender(Protocol):
+     async def send_db_usage_warning_email(
+         self,
+         email: str,
+         current_usage_gibibytes: float,
+         allocated_storage_gibibytes: float,
+         notification_threshold_percentage: float,
+     ) -> None: ...
+
+
  class EmailSender(
      WelcomeEmailSender,
      PasswordResetEmailSender,
+     DbUsageWarningEmailSender,
      Protocol,
  ): ...
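Because DbUsageWarningEmailSender is a typing Protocol, any object with a structurally matching coroutine satisfies it; nothing has to inherit from it. A sketch of a no-op sender that could be handed to DbDiskUsageMonitor in tests (the class name is hypothetical):

class NoopDbUsageWarningEmailSender:
    async def send_db_usage_warning_email(
        self,
        email: str,
        current_usage_gibibytes: float,
        allocated_storage_gibibytes: float,
        notification_threshold_percentage: float,
    ) -> None:
        pass  # satisfies the protocol without sending any SMTP mail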
phoenix/server/grpc_server.py CHANGED
@@ -1,5 +1,5 @@
  from collections.abc import Awaitable, Callable
- from typing import TYPE_CHECKING, Any, Optional
+ from typing import TYPE_CHECKING, Any, Iterable, Optional

  import grpc
  from grpc.aio import RpcContext, Server, ServerInterceptor
@@ -61,7 +61,7 @@ class GrpcServer:
          enable_prometheus: bool = False,
          disabled: bool = False,
          token_store: Optional[CanReadToken] = None,
-         interceptors: list[ServerInterceptor] = [],
+         interceptors: Iterable[ServerInterceptor] = (),
      ) -> None:
          self._callback = callback
          self._server: Optional[Server] = None
@@ -69,7 +69,7 @@ class GrpcServer:
          self._enable_prometheus = enable_prometheus
          self._disabled = disabled
          self._token_store = token_store
-         self._interceptors = interceptors
+         self._interceptors = list(interceptors)

      async def __aenter__(self) -> None:
          interceptors = self._interceptors
phoenix/server/prometheus.py CHANGED
@@ -73,6 +73,28 @@ JWT_STORE_API_KEYS_ACTIVE = Gauge(
      documentation="Current number of API keys in the JWT store",
  )

+ DB_DISK_USAGE_BYTES = Gauge(
+     name="database_disk_usage_bytes",
+     documentation="Current database disk usage in bytes",
+ )
+ DB_DISK_USAGE_RATIO = Gauge(
+     name="database_disk_usage_ratio",
+     documentation="Current database disk usage as ratio of allocated capacity (0-1)",
+ )
+ DB_INSERTIONS_BLOCKED = Gauge(
+     name="database_insertions_blocked",
+     documentation="Whether database insertions are currently blocked due to disk usage "
+     "(1 = blocked, 0 = not blocked)",
+ )
+ DB_DISK_USAGE_WARNING_EMAILS_SENT = Counter(
+     name="database_disk_usage_warning_emails_sent_total",
+     documentation="Total count of database disk usage warning emails sent",
+ )
+ DB_DISK_USAGE_WARNING_EMAIL_ERRORS = Counter(
+     name="database_disk_usage_warning_email_errors_total",
+     documentation="Total count of database disk usage warning email send errors",
+ )
+

  class PrometheusMiddleware(BaseHTTPMiddleware):
      async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
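These are standard prometheus_client gauges and counters, so they can be read back from the default registry in the same process, for example in a test, assuming phoenix.server.prometheus has been imported and the server runs with Prometheus metrics enabled:

from prometheus_client import REGISTRY, generate_latest

blocked = REGISTRY.get_sample_value("database_insertions_blocked")
usage_bytes = REGISTRY.get_sample_value("database_disk_usage_bytes")
emails_sent = REGISTRY.get_sample_value("database_disk_usage_warning_emails_sent_total")
print(blocked, usage_bytes, emails_sent)

# Full exposition format, as a /metrics scrape would see it:
print(generate_latest(REGISTRY).decode())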
phoenix/server/static/.vite/manifest.json CHANGED
@@ -1,67 +1,67 @@
  {
-   "_components-CVcMbu2U.js": {
-     "file": "assets/components-CVcMbu2U.js",
+   "_components-Bwf6zNbg.js": {
+     "file": "assets/components-Bwf6zNbg.js",
      "name": "components",
      "imports": [
-       "_vendor-pg5m6BWE.js",
-       "_pages-QK2o2V7x.js",
-       "_vendor-arizeai-BwMsgSAG.js",
-       "_vendor-codemirror-BwSDEu2g.js",
+       "_vendor-DRWIRkSJ.js",
+       "_pages-BCR8hW_l.js",
+       "_vendor-arizeai-DUhQaeau.js",
+       "_vendor-codemirror-D_6Q6Auv.js",
        "_vendor-three-C5WAXd5r.js"
      ]
    },
-   "_pages-QK2o2V7x.js": {
-     "file": "assets/pages-QK2o2V7x.js",
+   "_pages-BCR8hW_l.js": {
+     "file": "assets/pages-BCR8hW_l.js",
      "name": "pages",
      "imports": [
-       "_vendor-pg5m6BWE.js",
-       "_vendor-arizeai-BwMsgSAG.js",
-       "_components-CVcMbu2U.js",
-       "_vendor-codemirror-BwSDEu2g.js",
-       "_vendor-recharts-SW3HwAtG.js"
+       "_vendor-DRWIRkSJ.js",
+       "_vendor-arizeai-DUhQaeau.js",
+       "_components-Bwf6zNbg.js",
+       "_vendor-codemirror-D_6Q6Auv.js",
+       "_vendor-recharts-BNBwj7vz.js"
+     ]
+   },
+   "_vendor-DRWIRkSJ.js": {
+     "file": "assets/vendor-DRWIRkSJ.js",
+     "name": "vendor",
+     "imports": [
+       "_vendor-three-C5WAXd5r.js"
+     ],
+     "css": [
+       "assets/vendor-WIZid84E.css"
      ]
    },
    "_vendor-WIZid84E.css": {
      "file": "assets/vendor-WIZid84E.css",
      "src": "_vendor-WIZid84E.css"
    },
-   "_vendor-arizeai-BwMsgSAG.js": {
-     "file": "assets/vendor-arizeai-BwMsgSAG.js",
+   "_vendor-arizeai-DUhQaeau.js": {
+     "file": "assets/vendor-arizeai-DUhQaeau.js",
      "name": "vendor-arizeai",
      "imports": [
-       "_vendor-pg5m6BWE.js"
+       "_vendor-DRWIRkSJ.js"
      ]
    },
-   "_vendor-codemirror-BwSDEu2g.js": {
-     "file": "assets/vendor-codemirror-BwSDEu2g.js",
+   "_vendor-codemirror-D_6Q6Auv.js": {
+     "file": "assets/vendor-codemirror-D_6Q6Auv.js",
      "name": "vendor-codemirror",
      "imports": [
-       "_vendor-pg5m6BWE.js",
-       "_vendor-shiki-BsdYoDvs.js"
-     ]
-   },
-   "_vendor-pg5m6BWE.js": {
-     "file": "assets/vendor-pg5m6BWE.js",
-     "name": "vendor",
-     "imports": [
-       "_vendor-three-C5WAXd5r.js"
-     ],
-     "css": [
-       "assets/vendor-WIZid84E.css"
+       "_vendor-DRWIRkSJ.js",
+       "_vendor-shiki-k1qj_XjP.js"
      ]
    },
-   "_vendor-recharts-SW3HwAtG.js": {
-     "file": "assets/vendor-recharts-SW3HwAtG.js",
+   "_vendor-recharts-BNBwj7vz.js": {
+     "file": "assets/vendor-recharts-BNBwj7vz.js",
      "name": "vendor-recharts",
      "imports": [
-       "_vendor-pg5m6BWE.js"
+       "_vendor-DRWIRkSJ.js"
      ]
    },
-   "_vendor-shiki-BsdYoDvs.js": {
-     "file": "assets/vendor-shiki-BsdYoDvs.js",
+   "_vendor-shiki-k1qj_XjP.js": {
+     "file": "assets/vendor-shiki-k1qj_XjP.js",
      "name": "vendor-shiki",
      "imports": [
-       "_vendor-pg5m6BWE.js"
+       "_vendor-DRWIRkSJ.js"
      ]
    },
    "_vendor-three-C5WAXd5r.js": {
@@ -69,19 +69,19 @@
      "name": "vendor-three"
    },
    "index.tsx": {
-     "file": "assets/index-Dz7I-Hpn.js",
+     "file": "assets/index-Bfg9uQ43.js",
      "name": "index",
      "src": "index.tsx",
      "isEntry": true,
      "imports": [
-       "_vendor-pg5m6BWE.js",
-       "_vendor-arizeai-BwMsgSAG.js",
-       "_pages-QK2o2V7x.js",
-       "_components-CVcMbu2U.js",
+       "_vendor-DRWIRkSJ.js",
+       "_vendor-arizeai-DUhQaeau.js",
+       "_pages-BCR8hW_l.js",
+       "_components-Bwf6zNbg.js",
        "_vendor-three-C5WAXd5r.js",
-       "_vendor-codemirror-BwSDEu2g.js",
-       "_vendor-shiki-BsdYoDvs.js",
-       "_vendor-recharts-SW3HwAtG.js"
+       "_vendor-codemirror-D_6Q6Auv.js",
+       "_vendor-shiki-k1qj_XjP.js",
+       "_vendor-recharts-BNBwj7vz.js"
      ]
    }
  }