nucliadb-telemetry 6.9.6.post5456__py3-none-any.whl → 6.10.0.post5758__py3-none-any.whl

This diff shows the content differences between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
@@ -15,7 +15,6 @@
  
  import asyncio
  import time
- from typing import List, Optional
  
  from opentelemetry.context import ( # type: ignore
  _SUPPRESS_INSTRUMENTATION_KEY,
@@ -78,7 +77,7 @@ class BatchSpanProcessor(SpanProcessor):
  self.worker(), name="OtelBatchSpanProcessor"
  )
  self.condition = asyncio.Condition()
- self._flush_request = None # type: Optional[_FlushRequest]
+ self._flush_request: _FlushRequest | None = None
  self.schedule_delay_millis = schedule_delay_millis
  self.max_export_batch_size = max_export_batch_size
  self.max_queue_size = max_queue_size
@@ -87,9 +86,10 @@ class BatchSpanProcessor(SpanProcessor):
  # flag that indicates that spans are being dropped
  self._spans_dropped = False
  # precallocated list to send spans to exporter
- self.spans_list: List[Optional[Span]] = [None] * self.max_export_batch_size
+ self.spans_list: list[Span | None] = [None] * self.max_export_batch_size
+ self.notify_tasks: set[asyncio.Task[None]] = set()
  
- def on_start(self, span: Span, parent_context: Optional[Context] = None) -> None:
+ def on_start(self, span: Span, parent_context: Context | None = None) -> None:
  pass
  
  def on_end(self, span: ReadableSpan) -> None:
@@ -115,7 +115,9 @@ class BatchSpanProcessor(SpanProcessor):
  logger.exception(e)
  
  if self.queue.qsize() >= self.max_export_batch_size:
- asyncio.create_task(self.notify())
+ task = asyncio.create_task(self.notify())
+ self.notify_tasks.add(task)
+ task.add_done_callback(self.notify_tasks.discard)
  
  async def notify(self):
  async with self.condition:
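Review note: the new `notify_tasks` set is the standard guard for fire-and-forget `asyncio.create_task()` calls. The event loop only keeps weak references to tasks, so a task that nothing else references can be garbage collected before it finishes; keeping it in a set until its done-callback discards it prevents that. A minimal sketch of the pattern (names here are illustrative, not taken from the package):

```python
import asyncio

background_tasks: set[asyncio.Task[None]] = set()

async def notify() -> None:
    ...  # wake up the batch worker

def schedule_notify() -> None:
    task = asyncio.create_task(notify())
    background_tasks.add(task)                        # hold a strong reference
    task.add_done_callback(background_tasks.discard)  # drop it once the task completes
```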
@@ -135,7 +137,7 @@ class BatchSpanProcessor(SpanProcessor):
  
  async def _worker(self) -> None:
  timeout = self.schedule_delay_millis / 1e3
- flush_request = None # type: Optional[_FlushRequest]
+ flush_request: _FlushRequest | None = None
  while not self.done:
  logger.debug("Waiting condition")
  async with self.condition:
@@ -187,7 +189,7 @@ class BatchSpanProcessor(SpanProcessor):
  
  def _get_and_unset_flush_request(
  self,
- ) -> Optional[_FlushRequest]:
+ ) -> _FlushRequest | None:
  """Returns the current flush request and makes it invisible to the
  worker thread for subsequent calls.
  """
@@ -199,7 +201,7 @@ class BatchSpanProcessor(SpanProcessor):
  
  @staticmethod
  def _notify_flush_request_finished(
- flush_request: Optional[_FlushRequest],
+ flush_request: _FlushRequest | None,
  ):
  """Notifies the flush initiator(s) waiting on the given request/event
  that the flush operation was finished.
@@ -220,7 +222,7 @@ class BatchSpanProcessor(SpanProcessor):
  self._flush_request = _FlushRequest()
  return self._flush_request
  
- async def _export(self, flush_request: Optional[_FlushRequest]):
+ async def _export(self, flush_request: _FlushRequest | None):
  """Exports spans considering the given flush_request.
  In case of a given flush_requests spans are exported in batches until
  the number of exported spans reached or exceeded the number of spans in
@@ -274,7 +276,7 @@ class BatchSpanProcessor(SpanProcessor):
  while self.queue.qsize():
  await self._export_batch()
  
- async def async_force_flush(self, timeout_millis: Optional[int] = None) -> bool:
+ async def async_force_flush(self, timeout_millis: int | None = None) -> bool:
  if timeout_millis is None:
  timeout_millis = self.export_timeout_millis
  
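Review note: most of this release is a mechanical typing cleanup. `typing.Optional[...]`/`Union[...]` become PEP 604 unions (`X | None`), `List`/`Dict`/`Tuple`/`Type` become PEP 585 built-in generics, and ABCs such as `Callable`, `Awaitable`, `Sequence`, `Iterable` and `AsyncIterator` are now imported from `collections.abc`. Because the `|` syntax also appears in runtime expressions later in the diff (for example inside a `ContextVar` subscription), this suggests the package now assumes Python 3.10 or newer. A before/after sketch of the pattern (illustrative function, not from the package):

```python
# Before: typing-module generics and Optional
from typing import Dict, List, Optional

def merge(base: Dict[str, str], extra: Optional[List[str]] = None) -> Dict[str, str]:
    ...

# After: built-in generics (PEP 585) and union syntax (PEP 604)
def merge(base: dict[str, str], extra: list[str] | None = None) -> dict[str, str]:
    ...
```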
@@ -22,16 +22,16 @@
  # This allows us to leverage context data for both tracing and logs.
  #
  import contextvars
- from typing import Dict, Optional, Sequence, Union
+ from collections.abc import Sequence
  
  from opentelemetry.trace import get_current_span
  
  from nucliadb_telemetry.settings import telemetry_settings
  
- context_data = contextvars.ContextVar[Optional[Dict[str, str]]]("data", default=None)
+ context_data = contextvars.ContextVar[dict[str, str] | None]("data", default=None)
  
  
- def add_context(new_data: Dict[str, str]):
+ def add_context(new_data: dict[str, str]):
  """
  This implementation always merges and sets the context, even if is was already set.
  
@@ -55,23 +55,14 @@ def clear_context():
  context_data.set({})
  
  
- def get_context() -> Dict[str, str]:
+ def get_context() -> dict[str, str]:
  return context_data.get() or {}
  
  
  def set_info_on_span(
- headers: Dict[
+ headers: dict[
  str,
- Union[
- str,
- bool,
- int,
- float,
- Sequence[str],
- Sequence[bool],
- Sequence[int],
- Sequence[float],
- ],
+ (str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float]),
  ],
  ):
  if telemetry_settings.jaeger_enabled:
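Review note: the context helpers above sit on top of a single `ContextVar` holding a `dict[str, str]`; `add_context` merges new keys into whatever is already set and `get_context` falls back to an empty dict. A rough usage sketch based on the signatures shown here (the import path and exact behaviour are assumptions, not confirmed by this diff):

```python
from nucliadb_telemetry.context import add_context, get_context  # assumed module path

def handle_request(request_id: str) -> None:
    # Attach request-scoped data once; later spans and log lines can read it back.
    add_context({"request_id": request_id})
    ...
    assert get_context().get("request_id") == request_id
```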
@@ -18,7 +18,7 @@ import os
  
  # abstract advanced error handling into its own module to prevent
  # code from handling sentry integration everywhere
- from typing import Any, ContextManager, List, Literal, Optional
+ from typing import Any, ContextManager, Literal
  
  import pydantic
  from pydantic_settings import BaseSettings
@@ -43,7 +43,7 @@ except ImportError: # pragma: no cover
  SENTRY = False
  
  
- def capture_exception(error: BaseException) -> Optional[str]:
+ def capture_exception(error: BaseException) -> str | None:
  if SENTRY:
  return sentry_sdk.capture_exception(error)
  return None
@@ -51,10 +51,10 @@ def capture_exception(error: BaseException) -> Optional[str]:
  
  def capture_message(
  error_msg: str,
- level: Optional[Literal["fatal", "critical", "error", "warning", "info", "debug"]] = None,
- scope: Optional[Any] = None,
+ level: Literal["fatal", "critical", "error", "warning", "info", "debug"] | None = None,
+ scope: Any | None = None,
  **scope_args: Any,
- ) -> Optional[str]:
+ ) -> str | None:
  if SENTRY:
  return sentry_sdk.capture_message(error_msg, level, scope, **scope_args)
  return None
@@ -80,7 +80,7 @@ class ErrorHandlingSettings(BaseSettings):
  zone: str = pydantic.Field(
  default="local", validation_alias=pydantic.AliasChoices("NUCLIA_ZONE", "ZONE")
  )
- sentry_url: Optional[str] = None
+ sentry_url: str | None = None
  environment: str = pydantic.Field(
  default="local",
  validation_alias=pydantic.AliasChoices("environment", "running_environment"),
@@ -111,7 +111,7 @@ def setup_error_handling(version: str) -> None:
  
  
  class SentryHandler(EventHandler):
- def __init__(self, allowed_loggers: List[str], *args, **kwargs):
+ def __init__(self, allowed_loggers: list[str], *args, **kwargs):
  super().__init__(*args, **kwargs)
  self._allowed_loggers = allowed_loggers
  
@@ -121,7 +121,7 @@ class SentryHandler(EventHandler):
  
  
  class SentryLoggingIntegration(LoggingIntegration):
- def __init__(self, allowed_loggers: List[str], level=logging.INFO, event_level=logging.ERROR):
+ def __init__(self, allowed_loggers: list[str], level=logging.INFO, event_level=logging.ERROR):
  self._breadcrumb_handler = BreadcrumbHandler(level=level)
  self._handler = SentryHandler(allowed_loggers, level=event_level)
  
@@ -129,7 +129,7 @@ class SentryLoggingIntegration(LoggingIntegration):
  # Initialize Sentry with the custom logging handler
  
  
- def setup_sentry_logging_integration(for_loggers: List[str]) -> None:
+ def setup_sentry_logging_integration(for_loggers: list[str]) -> None:
  settings = ErrorHandlingSettings()
  if settings.sentry_url:
  sentry_sdk.init(
@@ -13,7 +13,7 @@
  # limitations under the License.
  #
  
- from typing import Iterable, List
+ from collections.abc import Iterable
  from urllib.parse import urlparse
  
  import prometheus_client
@@ -60,7 +60,7 @@ class ExcludeList:
  
  def instrument_app(
  app: FastAPI,
- excluded_urls: List[str],
+ excluded_urls: list[str],
  server_request_hook: ServerRequestHookT = None,
  tracer_provider=None,
  metrics=False,
@@ -12,15 +12,14 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #
- import typing
  import urllib
+ from collections.abc import Callable
  from functools import wraps
- from typing import Callable, Optional, Tuple
  
  from asgiref.compatibility import guarantee_single_callable
  from fastapi import Request, Response
  from opentelemetry import context, trace
- from opentelemetry.instrumentation.asgi.version import __version__ # noqa
+ from opentelemetry.instrumentation.asgi.version import __version__
  from opentelemetry.instrumentation.propagators import get_global_response_propagator
  from opentelemetry.instrumentation.utils import (
  _start_internal_or_server_span,
@@ -28,7 +27,7 @@ from opentelemetry.instrumentation.utils import (
  )
  from opentelemetry.propagators.textmap import Getter, Setter
  from opentelemetry.semconv.trace import SpanAttributes
- from opentelemetry.trace import Span, format_trace_id, set_span_in_context
+ from opentelemetry.trace import INVALID_SPAN, Span, format_trace_id, set_span_in_context
  from opentelemetry.trace.status import Status, StatusCode
  from opentelemetry.util.http import (
  OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS,
@@ -42,7 +41,7 @@ from opentelemetry.util.http import (
  )
  from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
  
- ServerRequestHookT = Optional[Callable[[Span, dict], None]]
+ ServerRequestHookT = Callable[[Span, dict], None] | None
  
  
  NUCLIA_TRACE_ID_HEADER = "X-NUCLIA-TRACE-ID"
@@ -60,7 +59,7 @@ ACCESS_CONTROL_EXPOSE_HEADER = "Access-Control-Expose-Headers"
  
  
  class ASGIGetter(Getter[dict]):
- def get(self, carrier: dict, key: str) -> typing.Optional[typing.List[str]]:
+ def get(self, carrier: dict, key: str) -> list[str] | None:
  """Getter implementation to retrieve a HTTP header value from the ASGI
  scope.
  
@@ -86,7 +85,7 @@ class ASGIGetter(Getter[dict]):
  return None
  return decoded
  
- def keys(self, carrier: dict) -> typing.List[str]:
+ def keys(self, carrier: dict) -> list[str]:
  headers = carrier.get("headers") or []
  return [_key.decode("utf8", errors="replace") for (_key, _) in headers]
  
@@ -227,7 +226,7 @@ def set_status_code(span, status_code):
  span.set_status(Status(http_status_to_status_code(status_code, server_span=True)))
  
  
- def get_default_span_details(scope: dict) -> Tuple[str, dict]:
+ def get_default_span_details(scope: dict) -> tuple[str, dict]:
  """
  Default span name is the HTTP method and URL path, or just the method.
  https://github.com/open-telemetry/opentelemetry-specification/pull/3165
@@ -360,7 +359,7 @@ class OpenTelemetryMiddleware:
  class CaptureTraceIdMiddleware(BaseHTTPMiddleware):
  def capture_trace_id(self, response):
  span = trace.get_current_span()
- if span is None:
+ if span is INVALID_SPAN:
  return
  trace_id = format_trace_id(span.get_span_context().trace_id)
  response.headers[NUCLIA_TRACE_ID_HEADER] = trace_id
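Review note: this hunk is a real bug fix rather than a type cleanup. `opentelemetry.trace.get_current_span()` never returns `None`; when there is no active span it returns the `INVALID_SPAN` sentinel (a non-recording span whose context carries an all-zero trace id), so the old `is None` guard could never trigger. A small check along these lines (illustrative helper, not the package's code):

```python
from opentelemetry import trace
from opentelemetry.trace import INVALID_SPAN, format_trace_id

def current_trace_id() -> str | None:
    span = trace.get_current_span()
    if span is INVALID_SPAN or not span.get_span_context().is_valid:
        # No active (valid) span, so there is no useful trace id to report.
        return None
    return format_trace_id(span.get_span_context().trace_id)
```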
@@ -377,8 +376,8 @@ class CaptureTraceIdMiddleware(BaseHTTPMiddleware):
  response = None
  try:
  response = await call_next(request)
+ return response
  finally:
  if response is not None:
  self.capture_trace_id(response)
  self.expose_trace_id_header(response)
- return response
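Review note: the `return` now lives in the `try` body instead of after the header handling. The `finally` block still runs before the value is actually handed back, so the trace-id headers are applied either way, but exceptions raised by `call_next` now propagate cleanly; had the old `return` sat inside the `finally` block, it would have silently swallowed any in-flight exception, which is the classic pitfall this shape avoids. A minimal illustration (hypothetical middleware, illustrative header name):

```python
async def dispatch(request, call_next):
    response = None
    try:
        response = await call_next(request)
        return response  # finally still runs before the value is returned
    finally:
        if response is not None:
            response.headers["X-Trace-Id"] = "..."  # header enrichment still happens
        # no `return` here: returning from `finally` would suppress exceptions
```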
@@ -12,7 +12,7 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #
- from typing import List, NamedTuple, Optional, Tuple
+ from typing import NamedTuple
  
  from fastapi.responses import JSONResponse
  from starlette.applications import Starlette
@@ -26,7 +26,7 @@ from nucliadb_telemetry import errors
  
  class FoundPathTemplate(NamedTuple):
  path: str
- scope: Optional[Scope]
+ scope: Scope | None
  match: bool
  
  
@@ -42,7 +42,7 @@ def get_path_template(scope: Scope) -> FoundPathTemplate:
  return path_template
  
  
- def find_route(scope: Scope, routes: List[Route]) -> Tuple[Optional[str], Optional[Scope]]:
+ def find_route(scope: Scope, routes: list[Route]) -> tuple[str | None, Scope | None]:
  # we mutate scope, so we need a copy
  scope = scope.copy() # type:ignore
  for route in routes:
@@ -15,9 +15,10 @@
  
  import functools
  from collections import OrderedDict
+ from collections.abc import Awaitable, Callable
  from concurrent import futures
  from contextlib import contextmanager
- from typing import Any, Awaitable, Callable, List, Optional, Tuple
+ from typing import Any
  
  import grpc
  from grpc import ChannelCredentials, ClientCallDetails, aio
@@ -371,8 +372,8 @@ class GRPCTelemetry:
  self,
  server_addr: str,
  max_send_message: int = 100,
- credentials: Optional[ChannelCredentials] = None,
- options: Optional[List[Tuple[str, Any]]] = None,
+ credentials: ChannelCredentials | None = None,
+ options: list[tuple[str, Any]] | None = None,
  ):
  options = [
  ("grpc.max_receive_message_length", max_send_message * 1024 * 1024),
@@ -397,8 +398,8 @@ class GRPCTelemetry:
  self,
  concurrency: int = 4,
  max_receive_message: int = 100,
- interceptors: Optional[List[aio.ServerInterceptor]] = None,
- options: Optional[List[Tuple[str, Any]]] = None,
+ interceptors: list[aio.ServerInterceptor] | None = None,
+ options: list[tuple[str, Any]] | None = None,
  ):
  _interceptors = (
  get_server_interceptors(self.service_name, self.tracer_provider)
@@ -14,7 +14,8 @@
  #
  
  import functools
- from typing import Any, Awaitable, Callable, Union
+ from collections.abc import Awaitable, Callable
+ from typing import Any
  
  import grpc
  from grpc import ClientCallDetails, aio
@@ -79,7 +80,7 @@ def finish_metric_grpc(metric: metrics.ObserverRecorder, result):
  metric.end()
  
  
- def _to_str(v: Union[str, bytes]) -> str:
+ def _to_str(v: str | bytes) -> str:
  if isinstance(v, str):
  return v
  return v.decode("utf-8")
@@ -14,7 +14,8 @@
  #
  
  import functools
- from typing import Any, Awaitable, Callable
+ from collections.abc import Awaitable, Callable
+ from typing import Any
  
  from grpc import HandlerCallDetails, RpcMethodHandler
  from grpc.experimental import ( # type: ignore
@@ -18,7 +18,6 @@ import math
  import socket
  from asyncio import Future
  from functools import partial
- from typing import List
  
  from opentelemetry.exporter.jaeger.thrift import JaegerExporter
  from opentelemetry.exporter.jaeger.thrift.gen.agent import Agent
@@ -37,14 +36,14 @@ UDP_PACKET_MAX_LENGTH = 65000
  
  class JaegerExporterAsync(JaegerExporter):
  def __init__(self, **kwags):
- super(JaegerExporterAsync, self).__init__(**kwags)
+ super().__init__(**kwags)
  self._agent_client = AgentClientUDPAsync(
  host_name=self.agent_host_name,
  port=self.agent_port,
  split_oversized_batches=self.udp_split_oversized_batches,
  )
  
- async def async_export(self, spans: List[Span]) -> SpanExportResult:
+ async def async_export(self, spans: list[Span]) -> SpanExportResult:
  # Populate service_name from first span
  # We restrict any SpanProcessor to be only associated with a single
  # TracerProvider, so it is safe to assume that all Spans in a single
@@ -13,9 +13,10 @@
  # limitations under the License.
  #
  
+ from collections.abc import Awaitable, Callable
  from datetime import datetime
  from functools import partial
- from typing import Any, Awaitable, Callable, Dict, List, Optional, Union
+ from typing import Any
  from urllib.parse import ParseResult
  
  import nats
@@ -61,7 +62,7 @@ msg_sent_counter = metrics.Counter("nuclia_nats_msg_sent", labels={"subject": ""
  
  
  def start_span_message_receiver(tracer: Tracer, msg: Msg):
- attributes: dict[str, Union[str, int]] = {
+ attributes: dict[str, str | int] = {
  SpanAttributes.MESSAGING_DESTINATION_KIND: "nats",
  SpanAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES: len(msg.data),
  SpanAttributes.MESSAGING_MESSAGE_ID: msg.reply,
@@ -121,18 +122,18 @@ class JetStreamContextTelemetry:
  async def stream_info(self, name: str):
  return await self.js.stream_info(name)
  
- async def add_stream(self, name: str, subjects: List[str]):
+ async def add_stream(self, name: str, subjects: list[str]):
  return await self.js.add_stream(name=name, subjects=subjects)
  
  async def subscribe(
  self,
  subject: str,
- queue: Optional[str] = None,
- cb: Optional[Callable[[Msg], Awaitable[None]]] = None,
+ queue: str | None = None,
+ cb: Callable[[Msg], Awaitable[None]] | None = None,
  **kwargs,
  ):
  tracer = self.tracer_provider.get_tracer(f"{self.service_name}_js_subscriber")
- wrapped_cb: Optional[Callable[[Msg], Awaitable[None]]]
+ wrapped_cb: Callable[[Msg], Awaitable[None]] | None
  if cb is not None:
  wrapped_cb = partial(_traced_callback, cb, tracer)
  else:
@@ -143,7 +144,7 @@ class JetStreamContextTelemetry:
  self,
  subject: str,
  body: bytes,
- headers: Optional[Dict[str, str]] = None,
+ headers: dict[str, str] | None = None,
  **kwargs,
  ):
  tracer = self.tracer_provider.get_tracer(f"{self.service_name}_js_publisher")
@@ -170,9 +171,9 @@
  async def pull_subscribe(
  self,
  subject: str,
- durable: Optional[str] = None,
- stream: Optional[str] = None,
- config: Optional[nats.js.api.ConsumerConfig] = None,
+ durable: str | None = None,
+ stream: str | None = None,
+ config: nats.js.api.ConsumerConfig | None = None,
  ) -> JetStreamContext.PullSubscription:
  return await self.js.pull_subscribe(subject, durable=durable, stream=stream, config=config) # type: ignore
  
@@ -212,7 +213,7 @@ class JetStreamContextTelemetry:
  },
  )
  
- async def consumer_info(self, stream: str, consumer: str, timeout: Optional[float] = None):
+ async def consumer_info(self, stream: str, consumer: str, timeout: float | None = None):
  return await self.js.consumer_info(stream, consumer, timeout)
  
  
@@ -226,7 +227,7 @@ class NatsClientTelemetry:
  self,
  subject: str,
  queue: str = "",
- cb: Optional[Callable[[Msg], Awaitable[None]]] = None,
+ cb: Callable[[Msg], Awaitable[None]] | None = None,
  **kwargs,
  ) -> Subscription:
  tracer = self.tracer_provider.get_tracer(f"{self.service_name}_nc_subscriber")
@@ -253,7 +254,7 @@
  subject: str,
  body: bytes = b"",
  reply: str = "",
- headers: Optional[Dict[str, str]] = None,
+ headers: dict[str, str] | None = None,
  ) -> None:
  tracer = self.tracer_provider.get_tracer(f"{self.service_name}_nc_publisher")
  headers = {} if headers is None else headers
@@ -274,7 +275,7 @@
  payload: bytes = b"",
  timeout: float = 0.5,
  old_style: bool = False,
- headers: Optional[Dict[str, Any]] = None,
+ headers: dict[str, Any] | None = None,
  ) -> Msg:
  headers = {} if headers is None else headers
  tracer = self.tracer_provider.get_tracer(f"{self.service_name}_nc_request")
@@ -295,7 +296,7 @@
  return self.nc.is_connected
  
  @property
- def connected_url(self) -> Optional[ParseResult]:
+ def connected_url(self) -> ParseResult | None:
  return self.nc.connected_url
  
  def jetstream(self, **opts) -> nats.js.JetStreamContext:
@@ -314,7 +315,7 @@
  return await self.nc.close()
  
  
- def get_traced_nats_client(nc: Client, service_name: str) -> Union[Client, NatsClientTelemetry]:
+ def get_traced_nats_client(nc: Client, service_name: str) -> Client | NatsClientTelemetry:
  tracer_provider = get_telemetry(service_name)
  if tracer_provider is not None:
  return NatsClientTelemetry(nc, service_name, tracer_provider)
@@ -323,8 +324,8 @@ def get_traced_nats_client(nc: Client, service_name: str) -> Union[Client, NatsC
  
  
  def get_traced_jetstream(
- nc: Union[Client, NatsClientTelemetry], service_name: str
- ) -> Union[JetStreamContext, JetStreamContextTelemetry]:
+ nc: Client | NatsClientTelemetry, service_name: str
+ ) -> JetStreamContext | JetStreamContextTelemetry:
  jetstream = nc.jetstream()
  tracer_provider = get_telemetry(service_name)
  
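Review note: the wrappers above keep the plain `nats` client API but substitute telemetry-aware objects whenever a tracer provider exists for the service; otherwise they fall through to the raw client or JetStream context. A rough usage sketch based on the signatures in this diff (module path, URL and subject are assumptions):

```python
import nats

from nucliadb_telemetry.jetstream import get_traced_jetstream, get_traced_nats_client

async def main() -> None:
    nc = await nats.connect("nats://localhost:4222")
    # NatsClientTelemetry when tracing is enabled, the plain client otherwise.
    traced_nc = get_traced_nats_client(nc, "my-service")
    js = get_traced_jetstream(traced_nc, "my-service")
    await js.publish("my.subject", b"payload", headers={"x-request-id": "abc"})
```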
@@ -19,7 +19,7 @@ import os
  from copy import copy
  from datetime import datetime, timezone
  from logging.handlers import RotatingFileHandler
- from typing import Any, Dict, Optional
+ from typing import Any
  
  import orjson
  import pydantic
@@ -41,40 +41,38 @@ try:
  except ImportError: # pragma: no cover
  AccessFormatter = logging.Formatter # type: ignore
  
- _BUILTIN_ATTRS = set(
- [
- # list of all possible args
- "args",
- "asctime",
- "created",
- "exc_info",
- "exc_text",
- "filename",
- "funcName",
- "levelname",
- "levelno",
- "lineno",
- "module",
- "msecs",
- "message",
- "msg",
- "name",
- "pathname",
- "process",
- "processName",
- "relativeCreated",
- "stack_info",
- "thread",
- "threadName",
- ]
- )
+ _BUILTIN_ATTRS = {
+ # list of all possible args
+ "args",
+ "asctime",
+ "created",
+ "exc_info",
+ "exc_text",
+ "filename",
+ "funcName",
+ "levelname",
+ "levelno",
+ "lineno",
+ "module",
+ "msecs",
+ "message",
+ "msg",
+ "name",
+ "pathname",
+ "process",
+ "processName",
+ "relativeCreated",
+ "stack_info",
+ "thread",
+ "threadName",
+ }
  
  
  ACCESS_LOG_FMT = "%(asctime)s.%(msecs)03d - %(client_addr)s - %(request_line)s %(status_code)s"
  ACCESS_LOG_DATEFMT = "%Y-%m-%d,%H:%M:%S"
  
  
- def extra_from_record(record) -> Dict[str, Any]:
+ def extra_from_record(record) -> dict[str, Any]:
  return {attr_name: record.__dict__[attr_name] for attr_name in set(record.__dict__) - _BUILTIN_ATTRS}
  
  
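Review note: `_BUILTIN_ATTRS` (now a plain set literal) lists the attributes every `logging.LogRecord` carries by default, and `extra_from_record` subtracts it from the record's `__dict__` so that only caller-supplied `extra={...}` fields survive into the structured output. A standalone illustration of that mechanism (not the package's formatter):

```python
import logging

_BUILTIN_ATTRS = {
    "args", "asctime", "created", "exc_info", "exc_text", "filename", "funcName",
    "levelname", "levelno", "lineno", "module", "msecs", "message", "msg", "name",
    "pathname", "process", "processName", "relativeCreated", "stack_info",
    "thread", "threadName",
}

def extra_from_record(record: logging.LogRecord) -> dict:
    # Anything not in the stock LogRecord attribute set came in via `extra=`.
    return {k: record.__dict__[k] for k in set(record.__dict__) - _BUILTIN_ATTRS}

record = logging.LogRecord("app", logging.INFO, __file__, 1, "hello", None, None)
record.__dict__["request_id"] = "abc123"  # what logger.info(..., extra={"request_id": ...}) would add
assert extra_from_record(record)["request_id"] == "abc123"
```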
@@ -84,7 +82,7 @@ class JSONFormatter(logging.Formatter):
  """
  
  def format(self, record: logging.LogRecord) -> str:
- extra: Dict[str, Any]
+ extra: dict[str, Any]
  if isinstance(record.msg, dict):
  extra = record.msg
  elif isinstance(record.msg, pydantic.BaseModel):
@@ -97,7 +95,7 @@
  
  return orjson.dumps(extra, default=repr).decode("utf-8", errors="ignore")
  
- def fill_log_data(self, data: Dict[str, Any], record: logging.LogRecord) -> None:
+ def fill_log_data(self, data: dict[str, Any], record: logging.LogRecord) -> None:
  if "time" not in data:
  data["time"] = datetime.now(timezone.utc)
  
@@ -153,7 +151,7 @@ class UvicornAccessFormatter(JSONFormatter):
  http_version,
  status_code,
  ) = recordcopy.args # type: ignore[misc]
- request_line = "%s %s HTTP/%s" % (method, full_path, http_version)
+ request_line = f"{method} {full_path} HTTP/{http_version}"
  recordcopy.__dict__.update(
  {
  "httpRequest": {
@@ -176,6 +174,7 @@ _default_logger_levels = {
  # some are too chatty
  "uvicorn.error": LogLevel.WARNING,
  "nucliadb_utils.utilities": LogLevel.WARNING,
+ "nucliadb.middleware": LogLevel.INFO,
  }
  
  
@@ -218,7 +217,7 @@ def setup_access_logging(settings: LogSettings) -> None:
  )
  
  
- def setup_logging(*, settings: Optional[LogSettings] = None) -> None:
+ def setup_logging(*, settings: LogSettings | None = None) -> None:
  if settings is None:
  settings = LogSettings()
  
@@ -12,22 +12,12 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #
- import asyncio
  import os
  import time
+ from collections.abc import Callable
  from functools import wraps
- from inspect import isasyncgenfunction, isgeneratorfunction
- from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- List,
- Optional,
- Type,
- TypeVar,
- Union,
- )
+ from inspect import isasyncgenfunction, iscoroutinefunction, isgeneratorfunction
+ from typing import TYPE_CHECKING, Any, TypeVar
  
  import prometheus_client
  
@@ -53,9 +43,9 @@ class Observer:
  self,
  name: str,
  *,
- error_mappings: Optional[Dict[str, Union[Type[Exception], Type[BaseException]]]] = None,
- labels: Optional[Dict[str, str]] = None,
- buckets: Optional[List[float]] = None,
+ error_mappings: dict[str, type[Exception] | type[BaseException]] | None = None,
+ labels: dict[str, str] | None = None,
+ buckets: list[float] | None = None,
  ):
  self.error_mappings = error_mappings or {}
  self.labels = labels or {}
@@ -71,7 +61,7 @@ class Observer:
  self.counter = prometheus_client.Counter(
  f"{name}_count",
  f"Number of times {name} was called.",
- labelnames=tuple(self.labels.keys()) + (_STATUS_METRIC,),
+ labelnames=(*tuple(self.labels.keys()), _STATUS_METRIC),
  )
  hist_kwargs = {}
  if buckets is not None:
@@ -83,9 +73,9 @@ class Observer:
  **hist_kwargs, # type: ignore
  )
  
- def wrap(self, labels: Optional[Dict[str, str]] = None) -> Callable[[F], F]:
+ def wrap(self, labels: dict[str, str] | None = None) -> Callable[[F], F]:
  def decorator(func):
- if asyncio.iscoroutinefunction(func):
+ if iscoroutinefunction(func):
  
  @wraps(func)
  async def inner(*args, **kwargs):
@@ -105,8 +95,7 @@ class Observer:
  @wraps(func)
  def inner(*args, **kwargs):
  with ObserverRecorder(self, labels or {}):
- for item in func(*args, **kwargs):
- yield item
+ yield from func(*args, **kwargs)
  
  else:
  
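Review note: two small modernizations in `Observer.wrap`: `asyncio.iscoroutinefunction` is replaced with `inspect.iscoroutinefunction` (the `asyncio` variant is deprecated in recent Python releases), and the manual generator loop becomes `yield from`. The dispatch idea, reduced to a standalone sketch (hypothetical `timed` decorator, not the package's implementation):

```python
import time
from contextlib import contextmanager
from functools import wraps
from inspect import isasyncgenfunction, iscoroutinefunction, isgeneratorfunction

@contextmanager
def recorder():
    start = time.monotonic()
    try:
        yield
    finally:
        print(f"took {time.monotonic() - start:.6f}s")

def timed(func):
    # Pick a wrapper matching the kind of callable being decorated.
    if iscoroutinefunction(func):
        @wraps(func)
        async def inner(*args, **kwargs):
            with recorder():
                return await func(*args, **kwargs)
    elif isasyncgenfunction(func):
        @wraps(func)
        async def inner(*args, **kwargs):
            with recorder():
                async for item in func(*args, **kwargs):
                    yield item
    elif isgeneratorfunction(func):
        @wraps(func)
        def inner(*args, **kwargs):
            with recorder():
                yield from func(*args, **kwargs)
    else:
        @wraps(func)
        def inner(*args, **kwargs):
            with recorder():
                return func(*args, **kwargs)
    return inner
```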
@@ -119,12 +108,12 @@ class Observer:
  
  return decorator
  
- def __call__(self, labels: Optional[Dict[str, str]] = None):
+ def __call__(self, labels: dict[str, str] | None = None):
  return ObserverRecorder(self, labels or {})
  
  
  class ObserverRecorder:
- def __init__(self, observer: Observer, label_overrides: Dict[str, str]):
+ def __init__(self, observer: Observer, label_overrides: dict[str, str]):
  self.observer = observer
  if len(label_overrides) > 0:
  self.labels = observer.labels.copy()
@@ -155,9 +144,9 @@ class ObserverRecorder:
  
  def __exit__(
  self,
- exc_type: Optional[Union[Type[Exception], Type[BaseException]]],
- exc_value: Optional[Union[Exception, BaseException]],
- traceback: Optional[StackSummary],
+ exc_type: type[Exception] | type[BaseException] | None,
+ exc_value: Exception | BaseException | None,
+ traceback: "StackSummary | None",
  ):
  if exc_type is not None:
  status = ERROR
@@ -171,7 +160,7 @@
  
  
  class Gauge:
- def __init__(self, name: str, *, labels: Optional[Dict[str, str]] = None):
+ def __init__(self, name: str, *, labels: dict[str, str] | None = None):
  self.labels = labels or {}
  if _VERSION_ENV_VAR_NAME in os.environ:
  self.labels[_VERSION_METRIC] = os.environ[_VERSION_ENV_VAR_NAME]
@@ -180,7 +169,7 @@
  name, f"Gauge for {name}.", labelnames=tuple(self.labels.keys())
  )
  
- def set(self, value: Union[float, int], labels: Optional[Dict[str, str]] = None):
+ def set(self, value: float | int, labels: dict[str, str] | None = None):
  merged_labels = self.labels.copy()
  merged_labels.update(labels or {})
  
@@ -189,7 +178,7 @@
  else:
  self.gauge.set(value)
  
- def inc(self, value: Union[float, int], labels: Optional[Dict[str, str]] = None):
+ def inc(self, value: float | int, labels: dict[str, str] | None = None):
  merged_labels = self.labels.copy()
  merged_labels.update(labels or {})
  
@@ -198,7 +187,7 @@
  else:
  self.gauge.inc(value)
  
- def dec(self, value: Union[float, int], labels: Optional[Dict[str, str]] = None):
+ def dec(self, value: float | int, labels: dict[str, str] | None = None):
  merged_labels = self.labels.copy()
  merged_labels.update(labels or {})
  
@@ -207,12 +196,12 @@
  else:
  self.gauge.dec(value)
  
- def remove(self, labels: Dict[str, str]):
+ def remove(self, labels: dict[str, str]):
  self.gauge.remove(*[labels[k] for k in self.labels.keys()])
  
  
  class Counter:
- def __init__(self, name: str, *, labels: Optional[Dict[str, str]] = None):
+ def __init__(self, name: str, *, labels: dict[str, str] | None = None):
  self.labels = labels or {}
  if _VERSION_ENV_VAR_NAME in os.environ:
  self.labels[_VERSION_METRIC] = os.environ[_VERSION_ENV_VAR_NAME]
@@ -221,7 +210,7 @@ class Counter:
  name, f"Counter for {name}.", labelnames=tuple(self.labels.keys())
  )
  
- def inc(self, labels: Optional[Dict[str, str]] = None, value: Union[float, int] = 1):
+ def inc(self, labels: dict[str, str] | None = None, value: float | int = 1):
  merged_labels = self.labels.copy()
  merged_labels.update(labels or {})
  
@@ -236,8 +225,8 @@ class Histogram:
  self,
  name: str,
  *,
- labels: Optional[Dict[str, str]] = None,
- buckets: Optional[List[float]] = None,
+ labels: dict[str, str] | None = None,
+ buckets: list[float] | None = None,
  ):
  self.labels = labels or {}
  if _VERSION_ENV_VAR_NAME in os.environ:
@@ -253,7 +242,7 @@ class Histogram:
  **kwargs, # type: ignore
  )
  
- def observe(self, value: float, labels: Optional[Dict[str, str]] = None):
+ def observe(self, value: float, labels: dict[str, str] | None = None):
  merged_labels = self.labels.copy()
  merged_labels.update(labels or {})
  if len(merged_labels) > 0:
@@ -263,11 +252,11 @@
  
  
  __all__ = (
- "Observer",
- "ObserverRecorder",
- "OK",
  "ERROR",
+ "OK",
  "Counter",
  "Gauge",
  "Histogram",
+ "Observer",
+ "ObserverRecorder",
  )
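Review note: pulling the metrics API together, an `Observer` owns a Prometheus counter and histogram and can be applied either as a decorator (`wrap`) or as a context manager (calling the observer returns an `ObserverRecorder`). A usage sketch inferred from the signatures in this diff (import style, metric name and labels are illustrative):

```python
from nucliadb_telemetry import metrics  # assumed import style

index_observer = metrics.Observer(
    "index_document",
    labels={"type": ""},
    error_mappings={"timeout": TimeoutError},
)

@index_observer.wrap({"type": "pdf"})
async def index_pdf(blob: bytes) -> None:
    ...

def index_text(blob: bytes) -> None:
    # Same metric, recorded through the context-manager form.
    with index_observer({"type": "text"}):
        ...
```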
@@ -14,7 +14,6 @@
  #
  
  import enum
- from typing import Dict, Optional
  
  import pydantic
  from pydantic_settings import BaseSettings
@@ -53,7 +52,7 @@ class LogFormatType(enum.Enum):
  class LogSettings(BaseSettings):
  debug: bool = False
  log_level: LogLevel = LogLevel.WARNING
- logger_levels: Optional[Dict[str, LogLevel]] = None
+ logger_levels: dict[str, LogLevel] | None = None
  log_output_type: LogOutputType = LogOutputType.STDOUT
  log_format_type: LogFormatType = LogFormatType.STRUCTURED
  
@@ -1,4 +1,3 @@
- # -*- coding: utf-8 -*-
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: nucliadb_telemetry/tests/grpc/hellostreamingworld.proto
  """Generated protocol buffer code."""
@@ -7,7 +7,7 @@ from nucliadb_telemetry.tests.grpc import (
  )
  
  
- class MultiGreeterStub(object):
+ class MultiGreeterStub:
  """The greeting service definition."""
  
  def __init__(self, channel):
@@ -23,7 +23,7 @@ class MultiGreeterStub(object):
  )
  
  
- class MultiGreeterServicer(object):
+ class MultiGreeterServicer:
  """The greeting service definition."""
  
  def sayHello(self, request, context):
@@ -48,7 +48,7 @@ def add_MultiGreeterServicer_to_server(servicer, server):
  
  
  # This class is part of an EXPERIMENTAL API.
- class MultiGreeter(object):
+ class MultiGreeter:
  """The greeting service definition."""
  
  @staticmethod
@@ -1,4 +1,3 @@
- # -*- coding: utf-8 -*-
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: nucliadb_telemetry/tests/grpc/helloworld.proto
  """Generated protocol buffer code."""
@@ -7,7 +7,7 @@ from nucliadb_telemetry.tests.grpc import (
  )
  
  
- class GreeterStub(object):
+ class GreeterStub:
  """The greeting service definition."""
  
  def __init__(self, channel):
@@ -23,7 +23,7 @@ class GreeterStub(object):
  )
  
  
- class GreeterServicer(object):
+ class GreeterServicer:
  """The greeting service definition."""
  
  def SayHello(self, request, context):
@@ -48,7 +48,7 @@ def add_GreeterServicer_to_server(servicer, server):
  
  
  # This class is part of an EXPERIMENTAL API.
- class Greeter(object):
+ class Greeter:
  """The greeting service definition."""
  
  @staticmethod
@@ -15,7 +15,7 @@
  
  import asyncio
  import os
- from typing import AsyncIterator
+ from collections.abc import AsyncIterator
  
  import nats
  import pytest
@@ -62,7 +62,7 @@ class Jaeger(BaseImage):
  if os.environ.get("TESTING", "") == "jenkins" or "TRAVIS" in os.environ:
  return port if port else self.port
  network = self.container_obj.attrs["NetworkSettings"]
- service_port = "{0}/udp".format(port if port else self.port)
+ service_port = f"{port if port else self.port}/udp"
  for netport in network["Ports"].keys():
  if netport == service_port:
  return network["Ports"][service_port][0]["HostPort"]
@@ -15,7 +15,6 @@
  
  import asyncio
  import time
- from typing import Optional
  
  from opentelemetry.context import Context # type: ignore
  from opentelemetry.sdk.trace import ReadableSpan, Span, SpanProcessor, TracerProvider
@@ -43,7 +42,7 @@ class AsyncMultiSpanProcessor(SpanProcessor):
  def on_start(
  self,
  span: Span,
- parent_context: Optional[Context] = None,
+ parent_context: Context | None = None,
  ) -> None:
  for sp in self._span_processors:
  sp.on_start(span, parent_context=parent_context)
@@ -15,7 +15,6 @@
  
  import asyncio
  import os
- from typing import Dict, Optional
  
  from opentelemetry.propagate import set_global_textmap
  from opentelemetry.propagators.b3 import B3MultiFormat
@@ -29,14 +28,14 @@ from nucliadb_telemetry.tracerprovider import (
  AsyncTracerProvider,
  )
  
- from .context import set_info_on_span # noqa: F401
+ from .context import set_info_on_span
  
  set_info_on_span # b/w compatible import
  
- GLOBAL_PROVIDER: Dict[str, AsyncTracerProvider] = {}
+ GLOBAL_PROVIDER: dict[str, AsyncTracerProvider] = {}
  
  
- def get_telemetry(service_name: Optional[str] = None) -> Optional[AsyncTracerProvider]:
+ def get_telemetry(service_name: str | None = None) -> AsyncTracerProvider | None:
  if service_name is None:
  return None
  if service_name not in GLOBAL_PROVIDER and service_name is not None:
@@ -47,7 +46,7 @@ def get_telemetry(service_name: Optional[str] = None) -> Optional[AsyncTracerPro
  return GLOBAL_PROVIDER.get(service_name)
  
  
- def create_telemetry(service_name: str) -> Optional[AsyncTracerProvider]:
+ def create_telemetry(service_name: str) -> AsyncTracerProvider | None:
  if telemetry_settings.jaeger_enabled is False:
  return None
  
@@ -69,7 +68,7 @@ async def clean_telemetry(service_name: str):
  del GLOBAL_PROVIDER[service_name]
  
  
- async def init_telemetry(tracer_provider: Optional[AsyncTracerProvider] = None):
+ async def init_telemetry(tracer_provider: AsyncTracerProvider | None = None):
  if tracer_provider is None:
  return
  
@@ -107,7 +106,7 @@ async def init_telemetry(tracer_provider: Optional[AsyncTracerProvider] = None):
  tracer_provider.initialized = True
  
  
- async def setup_telemetry(service_name: str) -> Optional[AsyncTracerProvider]:
+ async def setup_telemetry(service_name: str) -> AsyncTracerProvider | None:
  """
  Setup telemetry for a service if it is enabled
  """
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nucliadb_telemetry
- Version: 6.9.6.post5456
+ Version: 6.10.0.post5758
  Summary: NucliaDB Telemetry Library Python process
  Author-email: Nuclia <nucliadb@nuclia.com>
  License-Expression: AGPL-3.0-or-later
@@ -0,0 +1,36 @@
+ nucliadb_telemetry/__init__.py,sha256=7Bhx3ctAKKX5SZ5f5hnvSqdnE55LMMV_UdYI8k3TK8M,647
+ nucliadb_telemetry/batch_span.py,sha256=QoUF_XGF1XDb-veaOb2F44jtM9i86PwdcFRW4YHBneo,11863
+ nucliadb_telemetry/common.py,sha256=IbR_YnpLU66cpMr-iRQY7v0DakSjBXsev8KMcqZlJ2U,972
+ nucliadb_telemetry/context.py,sha256=TG-CjWE_N3iMDogkaOilEnYejg20UQuzI5IFp8F8tMA,2133
+ nucliadb_telemetry/errors.py,sha256=VqF_0pbUb0rurtX9Cps1e8Wo8MKmL18g-CRVxs0MKTI,4257
+ nucliadb_telemetry/grpc.py,sha256=Ya8tQ_KgwJE0170Xfwa1FRpZs4Wqgk4EvkjAFJ_8Tzw,14904
+ nucliadb_telemetry/grpc_metrics.py,sha256=HXC9MpN8EZ4JSUljC2M6HF_YgdBKodBq03IuG3DOeSQ,4938
+ nucliadb_telemetry/grpc_sentry.py,sha256=XJvziMEvH2pyG10IzDY_1qBXr9jcnxb6S6lQRhJFYko,2436
+ nucliadb_telemetry/jaeger.py,sha256=omGwtBUMIbXKACy6uJHTvik6ZDfsnAFCxxzU3KORCLQ,6737
+ nucliadb_telemetry/jetstream.py,sha256=jCrJRSB98wtDPm9v8ODNWMNkUVh8M5my5aJgsPj92OY,11741
+ nucliadb_telemetry/logs.py,sha256=kjf0WMGbVkch8Jx5hu5dTWh1chsIH2Fgy4NyRZUdhPM,8988
+ nucliadb_telemetry/metrics.py,sha256=nhK0Q1nerVPu2jTGrTAjlpv5ln8ozwLnwVqwOO0nkuc,8218
+ nucliadb_telemetry/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ nucliadb_telemetry/settings.py,sha256=ZFPg3vohjkXdMyaWeOB4CMM6yW6BOy7reF4jAm5qHLo,2224
+ nucliadb_telemetry/tracerprovider.py,sha256=F0Z_EAoVmSGzYS_upnju41DIjbJbAd1h8kt2cl0ylVc,3463
+ nucliadb_telemetry/utils.py,sha256=Wk7lXFlRYpdB_GPLbKXgv4wEyYZgciCSfAMgkIRsnew,4619
+ nucliadb_telemetry/fastapi/__init__.py,sha256=oXj0njbw9laK-B-qdvXNQOeO1EzjVrYMmSzx9083X1Y,3124
+ nucliadb_telemetry/fastapi/context.py,sha256=O2PhwGDtej0AAIVpU5G3_iRkkbpDetsXHaD8buUtLk8,1280
+ nucliadb_telemetry/fastapi/metrics.py,sha256=XgP0EEKWlm-lAnvgFSg0nG8kJzXjCgny-Bt4prmFcvI,3672
+ nucliadb_telemetry/fastapi/tracing.py,sha256=PPDNxl5TLwOVGK_GjaPpS902-LRQWRyleNudo0Mhnho,14763
+ nucliadb_telemetry/fastapi/utils.py,sha256=w5S-nJIJXN31y87E-kQ8aXX26VopeG3iSlsvPpmYpVI,2697
+ nucliadb_telemetry/tests/__init__.py,sha256=A81KxGC8myNIvXEK0ZSsws8bZtNiwtvgh722sOMhgBM,585
+ nucliadb_telemetry/tests/telemetry.py,sha256=5H7v9GxYVQXRvogPafVs8jZPfP00S7QRdO6hBeGlATo,12308
+ nucliadb_telemetry/tests/grpc/__init__.py,sha256=zG33bUz1rHFPtvqQPWn4rDwBJt3FJodGuQYD45quiQg,583
+ nucliadb_telemetry/tests/grpc/hellostreamingworld_pb2.py,sha256=fkqkinsdhqpOlihFYgNiAIHH68Muu3USESDPqWv1Jmk,2318
+ nucliadb_telemetry/tests/grpc/hellostreamingworld_pb2.pyi,sha256=4IKe88c3x4pFAANnnprD1ILWskVJjj6rKgwKk64cYjU,1412
+ nucliadb_telemetry/tests/grpc/hellostreamingworld_pb2_grpc.py,sha256=a5EvC63hy-A0UGC0lp-y5gygNP6E83G1zXzhUEprcmk,2798
+ nucliadb_telemetry/tests/grpc/hellostreamingworld_pb2_grpc.pyi,sha256=jihuN5OoNQzPVMcBUBu_gSNnLZoJZG-i7XCfwbzy-hY,1085
+ nucliadb_telemetry/tests/grpc/helloworld_pb2.py,sha256=Uazg5JZc22RFg0KcQbixaXlgSaTMO-_x1AWVcbljtyc,2268
+ nucliadb_telemetry/tests/grpc/helloworld_pb2.pyi,sha256=mBZCQE6z7riQw6fOegJgZ5lHmXqO23_zxL8iQMqSqms,1191
+ nucliadb_telemetry/tests/grpc/helloworld_pb2_grpc.py,sha256=pBwPy7FoTVON0P0QXyyd-SUNAguAdNEIprdSgURlpOw,2653
+ nucliadb_telemetry/tests/grpc/helloworld_pb2_grpc.pyi,sha256=Y6teCx-PhPU-rI6w5ItLBKaTb34FLpngPnuDVWtNve4,958
+ nucliadb_telemetry-6.10.0.post5758.dist-info/METADATA,sha256=7kgKD-CSyMConLpcDiNBZKT_EMmameA2VO5F7ZeHvGc,10938
+ nucliadb_telemetry-6.10.0.post5758.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ nucliadb_telemetry-6.10.0.post5758.dist-info/top_level.txt,sha256=3qEHI_5ttqQIL2gkNYwSlKsFyBBiEzDiIy9UKISzOaQ,19
+ nucliadb_telemetry-6.10.0.post5758.dist-info/RECORD,,
@@ -1,36 +0,0 @@
- nucliadb_telemetry/__init__.py,sha256=7Bhx3ctAKKX5SZ5f5hnvSqdnE55LMMV_UdYI8k3TK8M,647
- nucliadb_telemetry/batch_span.py,sha256=vUokuCi0R1BUUfoLo70ss5SMQul3--YRaNsUocvqA8U,11769
- nucliadb_telemetry/common.py,sha256=IbR_YnpLU66cpMr-iRQY7v0DakSjBXsev8KMcqZlJ2U,972
- nucliadb_telemetry/context.py,sha256=_qVWuJI-WD6f35OhELUBZ538NmlVLYH4GierXAxAx38,2255
- nucliadb_telemetry/errors.py,sha256=zrry3ZlJFKdZ-xkzsKQ0SZSz3uLxkEihaCfjv2WRxUk,4288
- nucliadb_telemetry/grpc.py,sha256=coc-RyKo_FeSE3uDwBSBwxN4YEoxDE9MsccNbm3STkM,14912
- nucliadb_telemetry/grpc_metrics.py,sha256=ZH9c0GMb7psdrnz5T2mCyv0Fj4KXeaW5JY9Tnjx-byU,4924
- nucliadb_telemetry/grpc_sentry.py,sha256=GgncQqN4ZCRyatXLbJbxnJNY9H9o9Jx1eWVuqcXSVPY,2409
- nucliadb_telemetry/jaeger.py,sha256=BWFJ8-i_7b7UVnRqjxZz_QgyIiq9W1xCCyCsWlNMfAo,6786
- nucliadb_telemetry/jetstream.py,sha256=0wkdo78SEY0P2qIvlfTHZELQnMquUHD2oToDL6BzbuM,11803
- nucliadb_telemetry/logs.py,sha256=L74RiKvnIFwP7EukkYi_9g9cNNELGBF5KYKih7_WM7M,9080
- nucliadb_telemetry/metrics.py,sha256=SjNCl7erLxV1n4i_5WyFEZo-oq3HuCKpjFOkOb7sWZ4,8402
- nucliadb_telemetry/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- nucliadb_telemetry/settings.py,sha256=3pskwvZ165lshedP8SJA4GbX2iR17nvKBLgkn1lnBP8,2261
- nucliadb_telemetry/tracerprovider.py,sha256=qs21ay00zeBe--DQa2JbUJZ11r0R4TsZiE537i5KRbQ,3494
- nucliadb_telemetry/utils.py,sha256=-cYZxcVIHdR5KAzClr6S4HvBl1yez15BGVkKO__TQh0,4682
- nucliadb_telemetry/fastapi/__init__.py,sha256=ryR3rUKseYfr6GRfs6-v7u-NTpXw-_v5kYkP71z_Izs,3121
- nucliadb_telemetry/fastapi/context.py,sha256=O2PhwGDtej0AAIVpU5G3_iRkkbpDetsXHaD8buUtLk8,1280
- nucliadb_telemetry/fastapi/metrics.py,sha256=XgP0EEKWlm-lAnvgFSg0nG8kJzXjCgny-Bt4prmFcvI,3672
- nucliadb_telemetry/fastapi/tracing.py,sha256=F10Nr4Dl4-9w17mvzcScuDr3X9smEEFCN4xMK8NEOtk,14802
- nucliadb_telemetry/fastapi/utils.py,sha256=FWyr8odUI2U9e3TUP1onX9xzJgdjEhJUexm8NGL7GoI,2729
- nucliadb_telemetry/tests/__init__.py,sha256=A81KxGC8myNIvXEK0ZSsws8bZtNiwtvgh722sOMhgBM,585
- nucliadb_telemetry/tests/telemetry.py,sha256=34VZcsDP79hHX7qfQL5pcDiuFqfDp4c-XvZv0PMOlXA,12308
- nucliadb_telemetry/tests/grpc/__init__.py,sha256=zG33bUz1rHFPtvqQPWn4rDwBJt3FJodGuQYD45quiQg,583
- nucliadb_telemetry/tests/grpc/hellostreamingworld_pb2.py,sha256=TqaM_C1EP1Gy_5XQKcuoFOzem4idhAGoWc5t3K-I0zE,2342
- nucliadb_telemetry/tests/grpc/hellostreamingworld_pb2.pyi,sha256=4IKe88c3x4pFAANnnprD1ILWskVJjj6rKgwKk64cYjU,1412
- nucliadb_telemetry/tests/grpc/hellostreamingworld_pb2_grpc.py,sha256=t0AUJC2DgtMwFZUGs8dGlMlb2k4YmvXgzWRg029-m0k,2822
- nucliadb_telemetry/tests/grpc/hellostreamingworld_pb2_grpc.pyi,sha256=jihuN5OoNQzPVMcBUBu_gSNnLZoJZG-i7XCfwbzy-hY,1085
- nucliadb_telemetry/tests/grpc/helloworld_pb2.py,sha256=ht4dmi0pAy6qDrwcjkbtSf_hODt08LfexFjtzodPo_I,2292
- nucliadb_telemetry/tests/grpc/helloworld_pb2.pyi,sha256=mBZCQE6z7riQw6fOegJgZ5lHmXqO23_zxL8iQMqSqms,1191
- nucliadb_telemetry/tests/grpc/helloworld_pb2_grpc.py,sha256=_jxUNxl4Fx-JztK9RO5R6osjNP2sVNVPAxLnmczEYOc,2677
- nucliadb_telemetry/tests/grpc/helloworld_pb2_grpc.pyi,sha256=Y6teCx-PhPU-rI6w5ItLBKaTb34FLpngPnuDVWtNve4,958
- nucliadb_telemetry-6.9.6.post5456.dist-info/METADATA,sha256=0nwZB4rxBbzb8U75hFIvxtAxSUhY_ag3qQYsgP2GMaQ,10937
- nucliadb_telemetry-6.9.6.post5456.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- nucliadb_telemetry-6.9.6.post5456.dist-info/top_level.txt,sha256=3qEHI_5ttqQIL2gkNYwSlKsFyBBiEzDiIy9UKISzOaQ,19
- nucliadb_telemetry-6.9.6.post5456.dist-info/RECORD,,