payi-0.1.0a96-py3-none-any.whl → payi-0.1.0a98-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of payi might be problematic.

payi/_models.py CHANGED
@@ -2,9 +2,10 @@ from __future__ import annotations
 
 import os
 import inspect
-from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast
+from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast
 from datetime import date, datetime
 from typing_extensions import (
+    List,
     Unpack,
     Literal,
     ClassVar,
@@ -366,7 +367,7 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
     if type_ is None:
         raise RuntimeError(f"Unexpected field type is None for {key}")
 
-    return construct_type(value=value, type_=type_)
+    return construct_type(value=value, type_=type_, metadata=getattr(field, "metadata", None))
 
 
 def is_basemodel(type_: type) -> bool:
@@ -420,7 +421,7 @@ def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T:
     return cast(_T, construct_type(value=value, type_=type_))
 
 
-def construct_type(*, value: object, type_: object) -> object:
+def construct_type(*, value: object, type_: object, metadata: Optional[List[Any]] = None) -> object:
     """Loose coercion to the expected type with construction of nested values.
 
     If the given value does not match the expected type then it is returned as-is.
@@ -438,8 +439,10 @@ def construct_type(*, value: object, type_: object) -> object:
         type_ = type_.__value__ # type: ignore[unreachable]
 
     # unwrap `Annotated[T, ...]` -> `T`
-    if is_annotated_type(type_):
-        meta: tuple[Any, ...] = get_args(type_)[1:]
+    if metadata is not None:
+        meta: tuple[Any, ...] = tuple(metadata)
+    elif is_annotated_type(type_):
+        meta = get_args(type_)[1:]
         type_ = extract_type_arg(type_, 0)
     else:
         meta = tuple()
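
For context on the _models.py change above: _construct_field now hands a Pydantic field's own metadata to construct_type instead of relying on the outer annotation still being wrapped in Annotated[...]. A minimal standalone sketch of the two extraction paths, using only standard typing helpers (the Marker class and extract_meta function below are hypothetical stand-ins, not SDK code):

    from typing import Annotated, Any, List, Optional, Tuple, get_args, get_origin


    class Marker:
        # Hypothetical annotation object; the SDK attaches its own metadata types.
        def __init__(self, alias: str) -> None:
            self.alias = alias


    def extract_meta(type_: Any, metadata: Optional[List[Any]] = None) -> Tuple[Any, ...]:
        if metadata is not None:
            # New path: the caller (e.g. a pydantic FieldInfo) supplies the metadata directly.
            return tuple(metadata)
        if get_origin(type_) is Annotated:
            # Old path: recover metadata by unwrapping Annotated[T, ...].
            return get_args(type_)[1:]
        return tuple()


    print(extract_meta(Annotated[int, Marker("x")]))   # metadata recovered from Annotated
    print(extract_meta(int, metadata=[Marker("x")]))   # metadata supplied explicitly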
payi/_version.py CHANGED
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "payi"
-__version__ = "0.1.0-alpha.96" # x-release-please-version
+__version__ = "0.1.0-alpha.98" # x-release-please-version
payi/lib/AnthropicInstrumentor.py CHANGED
@@ -9,9 +9,13 @@ from payi.lib.helpers import PayiCategories
 from payi.types.ingest_units_params import Units
 
 from .instrument import _ChunkResult, _IsStreaming, _StreamingType, _ProviderRequest, _PayiInstrumentor
+from .version_helper import get_version_helper
 
 
 class AnthropicInstrumentor:
+    _module_name: str = "anthropic"
+    _module_version: str = ""
+
     @staticmethod
     def is_vertex(instance: Any) -> bool:
         from anthropic import AnthropicVertex, AsyncAnthropicVertex # type: ignore # noqa: I001
@@ -27,7 +31,7 @@ class AnthropicInstrumentor:
     @staticmethod
     def instrument(instrumentor: _PayiInstrumentor) -> None:
         try:
-            import anthropic # type: ignore # noqa: F401 I001
+            AnthropicInstrumentor._module_version = get_version_helper(AnthropicInstrumentor._module_name)
 
             wrap_function_wrapper(
                 "anthropic.resources.messages",
@@ -149,6 +153,8 @@ class _AnthropicProviderRequest(_ProviderRequest):
             instrumentor=instrumentor,
             category=category,
             streaming_type=streaming_type,
+            module_name=AnthropicInstrumentor._module_name,
+            module_version=AnthropicInstrumentor._module_version,
         )
 
     @override
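
Each instrumentor now captures the wrapped SDK's distribution name and installed version; instrument.py (further below) forwards them on ingest as an xProxy-Instrumented-Module header. A rough sketch of the value being built, assuming the anthropic package is installed (the concrete version string is illustrative):

    from payi.lib.version_helper import get_version_helper

    module_name = "anthropic"
    module_version = get_version_helper(module_name)  # e.g. "0.40.0", or "" when the lookup fails

    extra_headers = {}
    if module_version:
        # Mirrors the f"{module_name}/{module_version}" format used by _process_ingest_units
        extra_headers["xProxy-Instrumented-Module"] = f"{module_name}/{module_version}"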
payi/lib/BedrockInstrumentor.py CHANGED
@@ -11,15 +11,21 @@ from payi.types.ingest_units_params import Units
 from payi.types.pay_i_common_models_api_router_header_info_param import PayICommonModelsAPIRouterHeaderInfoParam
 
 from .instrument import _ChunkResult, _IsStreaming, _StreamingType, _ProviderRequest, _PayiInstrumentor
+from .version_helper import get_version_helper
 
 
 class BedrockInstrumentor:
+    _module_name: str = "boto3"
+    _module_version: str = ""
+
     _instrumentor: _PayiInstrumentor
 
     @staticmethod
     def instrument(instrumentor: _PayiInstrumentor) -> None:
         BedrockInstrumentor._instrumentor = instrumentor
 
+        BedrockInstrumentor._module_version = get_version_helper(BedrockInstrumentor._module_name)
+
         try:
             wrap_function_wrapper(
                 "botocore.client",
@@ -234,6 +240,8 @@ class _BedrockProviderRequest(_ProviderRequest):
             instrumentor=instrumentor,
             category=PayiCategories.aws_bedrock,
             streaming_type=_StreamingType.iterator,
+            module_name=BedrockInstrumentor._module_name,
+            module_version=BedrockInstrumentor._module_version,
             is_aws_client=True,
         )
 
payi/lib/GoogleGenAiInstrumentor.py CHANGED
@@ -5,12 +5,18 @@ from wrapt import wrap_function_wrapper # type: ignore
 
 from .instrument import _ChunkResult, _IsStreaming, _PayiInstrumentor
 from .VertexRequest import _VertexRequest
+from .version_helper import get_version_helper
 
 
 class GoogleGenAiInstrumentor:
+    _module_name: str = "google-genai"
+    _module_version: str = ""
+
     @staticmethod
     def instrument(instrumentor: _PayiInstrumentor) -> None:
         try:
+            GoogleGenAiInstrumentor._module_version = get_version_helper(GoogleGenAiInstrumentor._module_name)
+
             wrap_function_wrapper(
                 "google.genai.models",
                 "Models.generate_content",
@@ -115,6 +121,8 @@ class _GoogleGenAiRequest(_VertexRequest):
     def __init__(self, instrumentor: _PayiInstrumentor):
         super().__init__(
             instrumentor=instrumentor,
+            module_name=GoogleGenAiInstrumentor._module_name,
+            module_version=GoogleGenAiInstrumentor._module_version,
         )
         self._prompt_character_count = 0
         self._candidates_character_count = 0
payi/lib/OpenAIInstrumentor.py CHANGED
@@ -10,9 +10,13 @@ from payi.lib.helpers import PayiCategories, PayiHeaderNames
 from payi.types.ingest_units_params import Units
 
 from .instrument import _ChunkResult, _IsStreaming, _StreamingType, _ProviderRequest, _PayiInstrumentor
+from .version_helper import get_version_helper
 
 
 class OpenAiInstrumentor:
+    _module_name: str = "openai"
+    _module_version: str = ""
+
     @staticmethod
     def is_azure(instance: Any) -> bool:
         from openai import AzureOpenAI, AsyncAzureOpenAI # type: ignore # noqa: I001
@@ -22,6 +26,8 @@ class OpenAiInstrumentor:
     @staticmethod
     def instrument(instrumentor: _PayiInstrumentor) -> None:
         try:
+            OpenAiInstrumentor._module_version = get_version_helper(OpenAiInstrumentor._module_name)
+
             wrap_function_wrapper(
                 "openai.resources.chat.completions",
                 "Completions.create",
@@ -187,6 +193,8 @@ class _OpenAiProviderRequest(_ProviderRequest):
             instrumentor=instrumentor,
             category=PayiCategories.openai,
             streaming_type=_StreamingType.iterator,
+            module_name=OpenAiInstrumentor._module_name,
+            module_version=OpenAiInstrumentor._module_version,
         )
         self._input_tokens_key = input_tokens_key
         self._output_tokens_key = output_tokens_key
payi/lib/VertexInstrumentor.py CHANGED
@@ -5,12 +5,18 @@ from wrapt import wrap_function_wrapper # type: ignore
 
 from .instrument import _ChunkResult, _IsStreaming, _PayiInstrumentor
 from .VertexRequest import _VertexRequest
+from .version_helper import get_version_helper
 
 
 class VertexInstrumentor:
+    _module_name: str = "google-cloud-aiplatform"
+    _module_version: str = ""
+
     @staticmethod
     def instrument(instrumentor: _PayiInstrumentor) -> None:
         try:
+            VertexInstrumentor._module_version = get_version_helper(VertexInstrumentor._module_name)
+
             wrap_function_wrapper(
                 "vertexai.generative_models",
                 "GenerativeModel.generate_content",
@@ -85,6 +91,8 @@ class _GoogleVertexRequest(_VertexRequest):
     def __init__(self, instrumentor: _PayiInstrumentor):
         super().__init__(
             instrumentor=instrumentor,
+            module_name=VertexInstrumentor._module_name,
+            module_version=VertexInstrumentor._module_version,
        )
         self._prompt_character_count = 0
         self._candidates_character_count = 0
payi/lib/VertexRequest.py CHANGED
@@ -10,11 +10,20 @@ from .instrument import _ChunkResult, _StreamingType, _ProviderRequest, _PayiIns
 
 
 class _VertexRequest(_ProviderRequest): # type: ignore
-    def __init__(self, instrumentor: _PayiInstrumentor):
+    KNOWN_MODALITIES = ("VIDEO", "AUDIO", "TEXT", "VISION", "IMAGE")
+
+    def __init__(
+        self,
+        instrumentor: _PayiInstrumentor,
+        module_name: str,
+        module_version: str
+    ) -> None:
         super().__init__(
             instrumentor=instrumentor,
             category=PayiCategories.google_vertex,
             streaming_type=_StreamingType.generator,
+            module_name=module_name,
+            module_version=module_version,
             is_google_vertex_or_genai_client=True,
         )
         self._prompt_character_count = 0
@@ -154,6 +163,7 @@ class _VertexRequest(_ProviderRequest): # type: ignore
 
         prompt_tokens_details: list[dict[str, Any]] = usage.get("prompt_tokens_details", [])
         candidates_tokens_details: list[dict[str, Any]] = usage.get("candidates_tokens_details", [])
+        cache_tokens_details: list[dict[str, Any]] = usage.get("cache_tokens_details", [])
 
         if not model:
             model = ""
@@ -214,27 +224,50 @@ class _VertexRequest(_ProviderRequest): # type: ignore
         if is_large_context_token_model(model, input):
             large_context = "_large_context"
 
+        cache_details: dict[str, int] = {}
+
+        for details in cache_tokens_details:
+            modality = details.get("modality", "")
+            if not modality:
+                continue
+
+            modality_token_count = details.get("token_count", 0)
+
+            if modality == "IMAGE":
+                modality = "VISION"
+
+            if modality in _VertexRequest.KNOWN_MODALITIES:
+                cache_details[modality] = modality_token_count
+                add_units(self, modality.lower() + "_cache_read" + large_context, input=modality_token_count)
+
         for details in prompt_tokens_details:
             modality = details.get("modality", "")
             if not modality:
                 continue
 
             modality_token_count = details.get("token_count", 0)
+
             if modality == "IMAGE":
-                add_units(self, "vision"+large_context, input=modality_token_count)
-            elif modality in ("VIDEO", "AUDIO", "TEXT"):
-                add_units(self, modality.lower()+large_context, input=modality_token_count)
+                modality = "VISION"
+
+            if modality in _VertexRequest.KNOWN_MODALITIES:
+                # Subtract cache_details value if modality is present, floor at zero
+                if modality in cache_details:
+                    modality_token_count = max(0, modality_token_count - cache_details[modality])
+
+                add_units(self, modality.lower() + large_context, input=modality_token_count)
+
         for details in candidates_tokens_details:
             modality = details.get("modality", "")
            if not modality:
                continue
 
            modality_token_count = details.get("token_count", 0)
-            if modality in ("VIDEO", "AUDIO", "TEXT", "IMAGE"):
-                add_units(self, modality.lower()+large_context, output=modality_token_count)
+            if modality in _VertexRequest.KNOWN_MODALITIES:
+                add_units(self, modality.lower() + large_context, output=modality_token_count)
 
         if thinking_token_count > 0:
-            add_units(self, "reasoning"+large_context, output=thinking_token_count)
+            add_units(self, "reasoning" + large_context, output=thinking_token_count)
 
         if not self._ingest["units"]:
             input = usage.get("prompt_token_count", 0)
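
The reworked modality accounting above first credits cached prompt tokens as <modality>_cache_read units and then subtracts them from the regular prompt units, flooring at zero (IMAGE is folded into VISION). A small illustration of the arithmetic with made-up token counts:

    # Hypothetical usage_metadata numbers for a single TEXT-modality request
    cached_text_tokens = 1200   # from cache_tokens_details
    prompt_text_tokens = 1500   # from prompt_tokens_details (includes the cached portion)

    cache_read_units = cached_text_tokens                                  # ingested as "text_cache_read"
    fresh_prompt_units = max(0, prompt_text_tokens - cached_text_tokens)   # ingested as "text"
    print(cache_read_units, fresh_prompt_units)  # 1200 300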
payi/lib/instrument.py CHANGED
@@ -15,11 +15,13 @@ from dataclasses import dataclass
 import nest_asyncio # type: ignore
 from wrapt import ObjectProxy # type: ignore
 
-from payi import Payi, AsyncPayi, APIConnectionError, __version__ as _payi_version
+from payi import Payi, AsyncPayi, APIStatusError, APIConnectionError, __version__ as _payi_version
 from payi.types import IngestUnitsParams
 from payi.lib.helpers import PayiHeaderNames
+from payi.types.shared import XproxyResult
 from payi.types.ingest_response import IngestResponse
 from payi.types.ingest_units_params import Units, ProviderResponseFunctionCall
+from payi.types.shared.xproxy_error import XproxyError
 from payi.types.pay_i_common_models_api_router_header_info_param import PayICommonModelsAPIRouterHeaderInfoParam
 
 from .helpers import PayiCategories
@@ -35,13 +37,18 @@ class _ChunkResult:
 
 class _ProviderRequest:
     def __init__(
-        self, instrumentor: '_PayiInstrumentor',
+        self,
+        instrumentor: '_PayiInstrumentor',
         category: str,
         streaming_type: '_StreamingType',
+        module_name: str,
+        module_version: str,
         is_aws_client: Optional[bool] = None,
         is_google_vertex_or_genai_client: Optional[bool] = None,
     ) -> None:
         self._instrumentor: '_PayiInstrumentor' = instrumentor
+        self._module_name: str = module_name
+        self._module_version: str = module_version
         self._estimated_prompt_tokens: Optional[int] = None
         self._category: str = category
         self._ingest: IngestUnitsParams = { "category": category, "units": {} } # type: ignore
@@ -146,6 +153,19 @@ class PayiInstrumentConfig(TypedDict, total=False):
     user_id: Optional[str]
     request_tags: Optional["list[str]"]
 
+class PayiContext(TypedDict, total=False):
+    use_case_name: Optional[str]
+    use_case_id: Optional[str]
+    use_case_version: Optional[int]
+    use_case_step: Optional[str]
+    limit_ids: Optional['list[str]']
+    user_id: Optional[str]
+    request_tags: Optional["list[str]"]
+    price_as_category: Optional[str]
+    price_as_resource: Optional[str]
+    resource_scope: Optional[str]
+    last_result: Optional[Union[XproxyResult, XproxyError]]
+
 class _Context(TypedDict, total=False):
     proxy: Optional[bool]
     use_case_name: Optional[str]
@@ -169,7 +189,7 @@ class _StreamingType(Enum):
     iterator = 1
     stream_manager = 2
 
-class _TrackContext:
+class _InternalTrackContext:
     def __init__(
         self,
         context: _Context,
@@ -190,6 +210,7 @@ class _TrackContext:
 
 class _PayiInstrumentor:
     _not_instrumented: str = "<not_instrumented>"
+    _instrumented_module_header_name: str = "xProxy-Instrumented-Module"
 
     def __init__(
         self,
@@ -235,6 +256,8 @@ class _PayiInstrumentor:
 
         self._instrument_inline_data: bool = global_config.get("instrument_inline_data", False)
 
+        self._last_result: Optional[Union[XproxyResult, XproxyError]] = None
+
         global_instrumentation = global_config.pop("global_instrumentation", True)
 
         if instruments is None or "*" in instruments:
@@ -354,9 +377,15 @@ class _PayiInstrumentor:
 
         return log_ingest_units
 
-    def _process_ingest_units(self, request: _ProviderRequest, log_data: 'dict[str, str]') -> bool:
+    def _process_ingest_units(
+        self,
+        request: _ProviderRequest, log_data: 'dict[str, str]',
+        extra_headers: 'dict[str, str]') -> None:
         ingest_units = request._ingest
 
+        if request._module_version:
+            extra_headers[_PayiInstrumentor._instrumented_module_header_name] = f'{request._module_name}/{request._module_version}'
+
         if request._function_call_builder:
             # convert the function call builder to a list of function calls
             ingest_units["provider_response_function_calls"] = list(request._function_call_builder.values())
@@ -394,8 +423,6 @@
         if stack_trace is not None:
             log_data["stack_trace"] = stack_trace
 
-        return True
-
     def _process_ingest_units_response(self, ingest_response: IngestResponse) -> None:
         if ingest_response.xproxy_result.limits:
             for limit_id, state in ingest_response.xproxy_result.limits.items():
@@ -413,7 +440,7 @@
                 if removeBlockedId:
                     self._blocked_limits.discard(limit_id)
 
-    def _process_ingest_connection_error(self, e: APIConnectionError, ingest_units: IngestUnitsParams) -> None:
+    def _process_ingest_connection_error(self, e: APIConnectionError, ingest_units: IngestUnitsParams) -> XproxyError:
         now = time.time()
 
         if (now - self._api_connection_error_last_log_time) > self._api_connection_error_window:
@@ -430,7 +457,9 @@
             # Suppress and count
             self._api_connection_error_count += 1
 
-    async def _aingest_units(self, request: _ProviderRequest) -> Optional[IngestResponse]:
+        return XproxyError(code="api_connection_error", message=str(e))
+
+    async def _aingest_units_worker(self, request: _ProviderRequest) -> Optional[Union[XproxyResult, XproxyError]]:
         ingest_response: Optional[IngestResponse] = None
         ingest_units = request._ingest
 
@@ -438,21 +467,21 @@
 
         # return early if there are no units to ingest and on a successul ingest request
         log_data: 'dict[str,str]' = {}
-        if not self._process_ingest_units(request, log_data):
-            self._logger.debug(f"_aingest_units: exit early")
-            return None
+        extra_headers: 'dict[str, str]' = {}
+
+        self._process_ingest_units(request, log_data=log_data, extra_headers=extra_headers)
 
         try:
             if self._logger.isEnabledFor(logging.DEBUG):
                 self._logger.debug(f"_aingest_units: sending ({self._create_logged_ingest_units(ingest_units)})")
 
             if self._apayi:
-                ingest_response = await self._apayi.ingest.units(**ingest_units)
+                ingest_response = await self._apayi.ingest.units(**ingest_units, extra_headers=extra_headers)
             elif self._payi:
-                ingest_response = self._payi.ingest.units(**ingest_units)
+                ingest_response = self._payi.ingest.units(**ingest_units, extra_headers=extra_headers)
             else:
                 self._logger.error("No payi instance to ingest units")
-                return None
+                return XproxyError(code="configuration_error", message="No Payi or AsyncPayi instance configured for ingesting units")
 
             self._logger.debug(f"_aingest_units: success ({ingest_response})")
 
@@ -463,15 +492,21 @@
                 request_id = ingest_response.xproxy_result.request_id
                 self._prompt_and_response_logger(request_id, log_data) # type: ignore
 
-            return ingest_response
-        except Exception as e:
-            if isinstance(e, APIConnectionError) and self._api_connection_error_window > 0:
-                self._process_ingest_connection_error(e, ingest_units)
-            else:
-                self._logger.error(f"Error Pay-i async ingesting: exception {e}, request {ingest_units}")
-
-            return None
+            return ingest_response.xproxy_result
+
+        except APIConnectionError as api_ex:
+            return self._process_ingest_connection_error(api_ex, ingest_units)
+
+        except APIStatusError as api_status_ex:
+            return self._process_api_status_error(api_status_ex)
+
+        except Exception as ex:
+            self._logger.error(f"Error Pay-i async ingesting: exception {ex}, request {ingest_units}")
+            return XproxyError(code="unknown_error", message=str(ex))
 
+    async def _aingest_units(self, request: _ProviderRequest) -> Optional[Union[XproxyResult, XproxyError]]:
+        return self.set_xproxy_result(await self._aingest_units_worker(request))
+
     def _call_async_use_case_definition_create(self, use_case_name: str, use_case_description: str) -> None:
         if not self._apayi:
             return
@@ -491,7 +526,7 @@
         except Exception as e:
             self._logger.error(f"Error calling async use_cases.definitions.create synchronously: {e}")
 
-    def _call_aingest_sync(self, request: _ProviderRequest) -> Optional[IngestResponse]:
+    def _call_aingest_sync(self, request: _ProviderRequest) -> Optional[Union[XproxyResult, XproxyError]]:
         try:
             loop = asyncio.get_running_loop()
         except RuntimeError:
@@ -500,7 +535,7 @@
         try:
             if loop and loop.is_running():
                 nest_asyncio.apply(loop) # type: ignore
-                return asyncio.run(self._aingest_units(request))
+                return asyncio.run(self._aingest_units(request))
             else:
                 # When there's no running loop, create a new one
                 return asyncio.run(self._aingest_units(request))
@@ -508,7 +543,41 @@
             self._logger.error(f"Error calling aingest_units synchronously: {e}")
             return None
 
-    def _ingest_units(self, request: _ProviderRequest) -> Optional[IngestResponse]:
+    def _process_api_status_error(self, e: APIStatusError) -> Optional[XproxyError]:
+        try:
+            body_dict: dict[str, Any] = {}
+
+            # Try to get the response body as JSON
+            body = e.body
+            if body is None:
+                self._logger.error("APIStatusError response has no body attribute")
+                return XproxyError(code="unknown_error", message=str(e))
+
+            # If body is bytes, decode to string
+            if isinstance(body, bytes):
+                body = body.decode("utf-8")
+            if isinstance(body, dict):
+                body_dict = body # type: ignore
+            else:
+                body = str(body)
+
+            if not body_dict:
+                try:
+                    body_dict = json.loads(body) # type: ignore
+                except Exception as json_ex:
+                    self._logger.error(f"Failed to parse response body as JSON: {json_ex}")
+                    return XproxyError(code="invalid_json", message=str(e))
+
+            xproxy_error = body_dict.get("xproxy_error", {})
+            code = xproxy_error.get("code", "unknown_error")
+            message = xproxy_error.get("message", str(e))
+            return XproxyError(code=code, message=message)
+
+        except Exception as ex:
+            self._logger.error(f"Exception in _process_api_status_error: {ex}")
+            return XproxyError(code="exception", message=str(ex))
+
+    def _ingest_units_worker(self, request: _ProviderRequest) -> Optional[Union[XproxyResult, XproxyError]]:
         ingest_response: Optional[IngestResponse] = None
         ingest_units = request._ingest
 
@@ -516,16 +585,15 @@
 
         # return early if there are no units to ingest and on a successul ingest request
         log_data: 'dict[str,str]' = {}
-        if not self._process_ingest_units(request, log_data):
-            self._logger.debug(f"_ingest_units: exit early")
-            return None
+        extra_headers: 'dict[str, str]' = {}
+        self._process_ingest_units(request, log_data=log_data, extra_headers=extra_headers)
 
         try:
             if self._payi:
                 if self._logger.isEnabledFor(logging.DEBUG):
                     self._logger.debug(f"_ingest_units: sending ({self._create_logged_ingest_units(ingest_units)})")
 
-                ingest_response = self._payi.ingest.units(**ingest_units)
+                ingest_response = self._payi.ingest.units(**ingest_units, extra_headers=extra_headers)
                 self._logger.debug(f"_ingest_units: success ({ingest_response})")
 
                 self._process_ingest_units_response(ingest_response)
@@ -534,22 +602,28 @@
                     request_id = ingest_response.xproxy_result.request_id
                     self._prompt_and_response_logger(request_id, log_data) # type: ignore
 
-                return ingest_response
+                return ingest_response.xproxy_result
             elif self._apayi:
                 # task runs async. aingest_units will invoke the callback and post process
-                ingest_response = self._call_aingest_sync(request)
-                self._logger.debug(f"_ingest_units: apayi success ({ingest_response})")
-                return ingest_response
+                sync_response = self._call_aingest_sync(request)
+                self._logger.debug(f"_ingest_units: apayi success ({sync_response})")
+                return sync_response
             else:
                 self._logger.error("No payi instance to ingest units")
+                return XproxyError(code="configuration_error", message="No Payi or AsyncPayi instance configured for ingesting units")
 
-        except Exception as e:
-            if isinstance(e, APIConnectionError) and self._api_connection_error_window > 0:
-                self._process_ingest_connection_error(e, ingest_units)
-            else:
-                self._logger.error(f"Error Pay-i ingesting: exception {e}, request {ingest_units}")
+        except APIConnectionError as api_ex:
+            return self._process_ingest_connection_error(api_ex, ingest_units)
+
+        except APIStatusError as api_status_ex:
+            return self._process_api_status_error(api_status_ex)
+
+        except Exception as ex:
+            self._logger.error(f"Error Pay-i async ingesting: exception {ex}, request {ingest_units}")
+            return XproxyError(code="unknown_error", message=str(ex))
 
-        return None
+    def _ingest_units(self, request: _ProviderRequest) -> Optional[Union[XproxyResult, XproxyError]]:
+        return self.set_xproxy_result(self._ingest_units_worker(request))
 
     def _setup_call_func(
         self
@@ -1040,6 +1114,10 @@
 
         return extra_headers
 
+    def set_xproxy_result(self, response: Optional[Union[XproxyResult, XproxyError]]) -> Optional[Union[XproxyResult, XproxyError]]:
+        self._last_result = response
+        return response
+
     @staticmethod
     def _update_extra_headers(
         context: _Context,
@@ -1617,7 +1695,7 @@ def track_context(
     price_as_resource: Optional[str] = None,
     resource_scope: Optional[str] = None,
     proxy: Optional[bool] = None,
-) -> _TrackContext:
+) -> _InternalTrackContext:
     # Create a new context for tracking
     context: _Context = {}
 
@@ -1637,4 +1715,22 @@
         context["price_as_resource"] = price_as_resource
         context["resource_scope"] = resource_scope
 
-    return _TrackContext(context)
+    return _InternalTrackContext(context)
+
+def get_context() -> PayiContext:
+    """
+    Returns the current tracking context from calls to @track and with track_context().
+    If no context is active, returns an empty context.
+    """
+    if not _instrumentor:
+        return PayiContext()
+    internal_context = _instrumentor.get_context() or {}
+
+    context_dict = {
+        key: value
+        for key, value in internal_context.items()
+        if key in PayiContext.__annotations__ and value is not None
+    }
+    if _instrumentor._last_result:
+        context_dict["last_result"] = _instrumentor._last_result
+    return PayiContext(**dict(context_dict)) # type: ignore
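
With _last_result now stored on the instrumentor, the new module-level get_context() surfaces the outcome of the most recent ingest as last_result: an XproxyResult on success, an XproxyError otherwise. A hedged usage sketch, assuming get_context is imported from payi.lib.instrument where the diff defines it (it may also be re-exported elsewhere):

    from payi.lib.instrument import get_context
    from payi.types.shared.xproxy_error import XproxyError  # import path taken from the diff

    ctx = get_context()              # PayiContext TypedDict; empty when nothing is active
    last = ctx.get("last_result")
    if last is None:
        print("no ingest recorded yet")
    elif isinstance(last, XproxyError):
        print("ingest failed:", last.code, last.message)
    else:
        # XproxyResult; carries request_id per the diff above
        print("ingest ok, request_id:", last.request_id)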
payi/lib/version_helper.py ADDED
@@ -0,0 +1,21 @@
+from importlib.metadata import version
+
+
+def get_version_helper(module:str) -> str:
+    """
+    Get the version of the specified module.
+
+    Args:
+        module (str): The name of the module to query.
+
+    Returns:
+        str: The version of the module, or an empty string if not found.
+    """
+    try:
+        return version(module)
+    except Exception:
+        try:
+            imported_module = __import__(module)
+            return getattr(imported_module, "__version__", "")
+        except Exception:
+            return ""
payi-0.1.0a98.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: payi
-Version: 0.1.0a96
+Version: 0.1.0a98
 Summary: The official Python library for the payi API
 Project-URL: Homepage, https://github.com/Pay-i/pay-i-python
 Project-URL: Repository, https://github.com/Pay-i/pay-i-python
@@ -18,6 +18,7 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Typing :: Typed
 Requires-Python: >=3.8
@@ -37,7 +38,8 @@ Description-Content-Type: text/markdown
 
 # Payi Python API library
 
-[![PyPI version](https://github.com/Pay-i/pay-i-python/tree/main/<https://img.shields.io/pypi/v/payi.svg?label=pypi%20(stable)>)](https://pypi.org/project/payi/)
+<!-- prettier-ignore -->
+[![PyPI version](https://img.shields.io/pypi/v/payi.svg?label=pypi%20(stable))](https://pypi.org/project/payi/)
 
 The Payi Python library provides convenient access to the Payi REST API from any Python 3.8+
 application. The library includes type definitions for all request params and response fields,
payi-0.1.0a98.dist-info/RECORD CHANGED
@@ -5,13 +5,13 @@ payi/_compat.py,sha256=VWemUKbj6DDkQ-O4baSpHVLJafotzeXmCQGJugfVTIw,6580
 payi/_constants.py,sha256=S14PFzyN9-I31wiV7SmIlL5Ga0MLHxdvegInGdXH7tM,462
 payi/_exceptions.py,sha256=ItygKNrNXIVY0H6LsGVZvFuAHB3Vtm_VZXmWzCnpHy0,3216
 payi/_files.py,sha256=mf4dOgL4b0ryyZlbqLhggD3GVgDf6XxdGFAgce01ugE,3549
-payi/_models.py,sha256=G1vczEodX0vUySeVKbF-mbzlaObNL1oVAYH4c65agRk,29131
+payi/_models.py,sha256=viD5E6aDMhxslcFHDYvkHaKzE8YLcNmsPsMe8STixvs,29294
 payi/_qs.py,sha256=AOkSz4rHtK4YI3ZU_kzea-zpwBUgEY8WniGmTPyEimc,4846
 payi/_resource.py,sha256=j2jIkTr8OIC8sU6-05nxSaCyj4MaFlbZrwlyg4_xJos,1088
 payi/_response.py,sha256=rh9oJAvCKcPwQFm4iqH_iVrmK8bNx--YP_A2a4kN1OU,28776
 payi/_streaming.py,sha256=Z_wIyo206T6Jqh2rolFg2VXZgX24PahLmpURp0-NssU,10092
 payi/_types.py,sha256=7jE5MoQQFVoVxw5vVzvZ2Ao0kcjfNOGsBgyJfLBEnMo,6195
-payi/_version.py,sha256=HrUZ97eXJDQhDfJm78BH6NvNVuDnKXoia2Jj2xHrnm0,165
+payi/_version.py,sha256=38kV-DSbhe__PIX2CraSgjbmWYlEpux8E-hfcRDYeKE,165
 payi/pagination.py,sha256=k2356QGPOUSjRF2vHpwLBdF6P-2vnQzFfRIJQAHGQ7A,1258
 payi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 payi/_utils/__init__.py,sha256=PNZ_QJuzZEgyYXqkO1HVhGkj5IU9bglVUcw7H-Knjzw,2062
@@ -25,15 +25,16 @@ payi/_utils/_transform.py,sha256=n7kskEWz6o__aoNvhFoGVyDoalNe6mJwp-g7BWkdj88,156
 payi/_utils/_typing.py,sha256=D0DbbNu8GnYQTSICnTSHDGsYXj8TcAKyhejb0XcnjtY,4602
 payi/_utils/_utils.py,sha256=ts4CiiuNpFiGB6YMdkQRh2SZvYvsl7mAF-JWHCcLDf4,12312
 payi/lib/.keep,sha256=wuNrz-5SXo3jJaJOJgz4vFHM41YH_g20F5cRQo0vLes,224
-payi/lib/AnthropicInstrumentor.py,sha256=ahYEPqQ5r7K_K5Zt6Q7rLl8jThFthRo3sc15l9o2VPs,14123
-payi/lib/BedrockInstrumentor.py,sha256=3f79Av7reWENDcX7_9D5YBwH0Zd-RDVgrZ8vh051yIY,17594
-payi/lib/GoogleGenAiInstrumentor.py,sha256=DLbmlwmyOwooe7FuBkEAO_Z3xjPKpvQyO4VwLBeZnn4,8477
-payi/lib/OpenAIInstrumentor.py,sha256=6HENrdN6kLMUwC00wNSdeGeK4fUDBWo2dCbVC9koi0c,22581
+payi/lib/AnthropicInstrumentor.py,sha256=jL9GG56lebZF2b9a5uM4F0cYjTRpxa9tTa8rX3az_po,14405
+payi/lib/BedrockInstrumentor.py,sha256=dMelEiVq5lV1vYh0K7YfXimtFWfZO_zQ3Mhu3y4bjlM,17926
+payi/lib/GoogleGenAiInstrumentor.py,sha256=LHiEZ7G5IhCcDlpVzQlXW9Ok96MHLeq7emEhFzPBTm0,8836
+payi/lib/OpenAIInstrumentor.py,sha256=_ULwIli11XP1yZK_pMGXuaSmHZ5pozuEt_v5DfhNuGw,22914
 payi/lib/Stopwatch.py,sha256=7OJlxvr2Jyb6Zr1LYCYKczRB7rDVKkIR7gc4YoleNdE,764
-payi/lib/VertexInstrumentor.py,sha256=q-oYr3YuwPc95xYn6KhjvKs5ZweGRg6mOIkxSTz-lic,7081
-payi/lib/VertexRequest.py,sha256=NNH6S86nCHaU54Rb1rxai-b3UiJvatmzKkDKBmqMeKY,10530
+payi/lib/VertexInstrumentor.py,sha256=OWuMPiW4LdLhj6DSAAy5qZiosVo8DSAuFWGxYpEucoE,7431
+payi/lib/VertexRequest.py,sha256=edv14HR5QtKaf07gmOqzWdhoJGdWCos85uCZASwsGL4,11710
 payi/lib/helpers.py,sha256=FPzNSSHGf9bgD6CanB7yVx_U8t4lm2c0jlZKrsziYlc,4242
-payi/lib/instrument.py,sha256=VEyEyjM0GGIdFqv9ogwBwyWQ8kRJgJWUxlWcQc6798Q,64801
+payi/lib/instrument.py,sha256=R3NSYXtoZX5nCvPtSWxREF_qlMkjkDQCF2RIxlO2E_s,69224
+payi/lib/version_helper.py,sha256=v0lC3kuaXn6PBDolE3mkmwJiA8Ot3z4RkVR7wlBuZCs,540
 payi/resources/__init__.py,sha256=B2bn1ZfCf6TbHlzZvy5TpFPtALnFcBRPYVKQH3S5qfQ,2457
 payi/resources/ingest.py,sha256=awE7xDdKOUL2Yhcrhcc11hSNycBlj8QRlahptjana5Q,23040
 payi/resources/categories/__init__.py,sha256=WeotN_d-0Ri8ohsrNPbve7RyViD9_N0NA9DrV3WYg3w,1701
@@ -132,7 +133,7 @@ payi/types/use_cases/definitions/kpi_retrieve_response.py,sha256=uQXliSvS3k-yDYw
 payi/types/use_cases/definitions/kpi_update_params.py,sha256=jbawdWAdMnsTWVH0qfQGb8W7_TXe3lq4zjSRu44d8p8,373
 payi/types/use_cases/definitions/kpi_update_response.py,sha256=zLyEoT0S8d7XHsnXZYT8tM7yDw0Aze0Mk-_Z6QeMtc8,459
 payi/types/use_cases/definitions/limit_config_create_params.py,sha256=pzQza_16N3z8cFNEKr6gPbFvuGFrwNuGxAYb--Kbo2M,449
-payi-0.1.0a96.dist-info/METADATA,sha256=VVawI7dTiqYjzxZQOhcwSagnNaBhaseMbtBybj4HUeY,16333
-payi-0.1.0a96.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
-payi-0.1.0a96.dist-info/licenses/LICENSE,sha256=CQt03aM-P4a3Yg5qBg3JSLVoQS3smMyvx7tYg_6V7Gk,11334
-payi-0.1.0a96.dist-info/RECORD,,
+payi-0.1.0a98.dist-info/METADATA,sha256=MX8Kgs_X43l0QT3wWsLL-SQdW3Am4j5SZJNZf0djYGY,16359
+payi-0.1.0a98.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+payi-0.1.0a98.dist-info/licenses/LICENSE,sha256=CQt03aM-P4a3Yg5qBg3JSLVoQS3smMyvx7tYg_6V7Gk,11334
+payi-0.1.0a98.dist-info/RECORD,,