payi 0.1.0a82__py3-none-any.whl → 0.1.0a84__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- payi/_version.py +1 -1
- payi/lib/AnthropicInstrumentor.py +92 -62
- payi/lib/BedrockInstrumentor.py +95 -108
- payi/lib/GoogleGenAiInstrumentor.py +31 -115
- payi/lib/OpenAIInstrumentor.py +13 -9
- payi/lib/VertexInstrumentor.py +168 -111
- payi/lib/instrument.py +135 -73
- payi/resources/categories/__init__.py +14 -0
- payi/resources/categories/categories.py +32 -0
- payi/resources/categories/fixed_cost_resources.py +196 -0
- payi/resources/ingest.py +14 -0
- payi/resources/limits/limits.py +4 -0
- payi/types/categories/__init__.py +1 -0
- payi/types/categories/fixed_cost_resource_create_params.py +21 -0
- payi/types/ingest_event_param.py +13 -1
- payi/types/ingest_units_params.py +11 -1
- payi/types/limit_create_params.py +2 -0
- payi/types/limit_history_response.py +3 -3
- {payi-0.1.0a82.dist-info → payi-0.1.0a84.dist-info}/METADATA +1 -1
- {payi-0.1.0a82.dist-info → payi-0.1.0a84.dist-info}/RECORD +22 -20
- {payi-0.1.0a82.dist-info → payi-0.1.0a84.dist-info}/WHEEL +0 -0
- {payi-0.1.0a82.dist-info → payi-0.1.0a84.dist-info}/licenses/LICENSE +0 -0
payi/lib/instrument.py
CHANGED
@@ -10,6 +10,7 @@ from abc import abstractmethod
 from enum import Enum
 from typing import Any, Set, Union, Callable, Optional, Sequence, TypedDict
 from datetime import datetime, timezone
+from dataclasses import dataclass
 from typing_extensions import deprecated

 import nest_asyncio # type: ignore
@@ -28,6 +29,11 @@ from .Stopwatch import Stopwatch
 global _g_logger
 _g_logger: logging.Logger = logging.getLogger("payi.instrument")

+@dataclass
+class _ChunkResult:
+    send_chunk_to_caller: bool
+    ingest: bool = False
+
 class _ProviderRequest:
     def __init__(self, instrumentor: '_PayiInstrumentor', category: str, streaming_type: '_StreamingType'):
         self._instrumentor: '_PayiInstrumentor' = instrumentor
@@ -36,8 +42,8 @@ class _ProviderRequest:
         self._ingest: IngestUnitsParams = { "category": category, "units": {} } # type: ignore
         self._streaming_type: '_StreamingType' = streaming_type

-    def process_chunk(self, _chunk: Any) ->
-        return True
+    def process_chunk(self, _chunk: Any) -> _ChunkResult:
+        return _ChunkResult(send_chunk_to_caller=True)

     def process_synchronous_response(self, response: Any, log_prompt_and_response: bool, kwargs: Any) -> Optional[object]: # noqa: ARG002
         return None
@@ -49,9 +55,14 @@ class _ProviderRequest:
     def process_request_prompt(self, prompt: 'dict[str, Any]', args: Sequence[Any], kwargs: 'dict[str, Any]') -> None:
         ...

+    def process_initial_stream_response(self, response: Any) -> None:
+        pass
+
+    @property
     def is_bedrock(self) -> bool:
         return self._category == PayiCategories.aws_bedrock

+    @property
     def is_vertex(self) -> bool:
         return self._category == PayiCategories.google_vertex

@@ -59,6 +70,10 @@ class _ProviderRequest:
         self.exception_to_semantic_failure(exception)
         return True

+    @property
+    def supports_extra_headers(self) -> bool:
+        return not self.is_bedrock and not self.is_vertex
+
     @property
     def streaming_type(self) -> '_StreamingType':
         return self._streaming_type
@@ -275,8 +290,7 @@ class _PayiInstrumentor:
         if int(ingest_units.get("http_status_code") or 0) < 400:
             units = ingest_units.get("units", {})
             if not units or all(unit.get("input", 0) == 0 and unit.get("output", 0) == 0 for unit in units.values()):
-                self._logger.
-                return False
+                self._logger.info('ingesting with no token counts')

         if self._log_prompt_and_response and self._prompt_and_response_logger:
             response_json = ingest_units.pop("provider_response_json", None)
@@ -341,7 +355,7 @@ class _PayiInstrumentor:

             return ingest_response
         except Exception as e:
-            self._logger.error(f"Error Pay-i ingesting
+            self._logger.error(f"Error Pay-i async ingesting: exception {e}, request {ingest_units}")

         return None

@@ -413,7 +427,7 @@ class _PayiInstrumentor:
                 self._logger.error("No payi instance to ingest units")

         except Exception as e:
-            self._logger.error(f"Error Pay-i ingesting
+            self._logger.error(f"Error Pay-i ingesting: exception {e}, request {ingest_units}")

         return None

@@ -801,8 +815,7 @@ class _PayiInstrumentor:
         context = self.get_context()

         if not context:
-            if request.
-                # boto3 doesn't allow extra_headers
+            if not request.supports_extra_headers:
                 kwargs.pop("extra_headers", None)

             self._logger.debug(f"invoke_wrapper: no instrumentation context, exit early")
@@ -817,8 +830,7 @@ class _PayiInstrumentor:
         self._update_extra_headers(context, extra_headers)

         if context.get("proxy", self._proxy_default):
-            if request.
-                # boto3 doesn't allow extra_headers
+            if not request.supports_extra_headers:
                 kwargs.pop("extra_headers", None)
             elif "extra_headers" not in kwargs and extra_headers:
                 # assumes anthropic and openai clients
@@ -894,7 +906,7 @@ class _PayiInstrumentor:
                 request=request,
             )

-            if request.is_bedrock
+            if request.is_bedrock:
                 if "body" in response:
                     response["body"] = stream_result
                 else:
@@ -1079,9 +1091,10 @@ class _StreamIteratorWrapper(ObjectProxy): # type: ignore

         instrumentor._logger.debug(f"StreamIteratorWrapper: instance {instance}, category {request._category}")

+        request.process_initial_stream_response(response)
+
         bedrock_from_stream: bool = False
-        if request.is_bedrock
-            request._ingest["provider_response_id"] = response["ResponseMetadata"]["RequestId"]
+        if request.is_bedrock:
             stream = response.get("stream", None)

             if stream:
@@ -1103,8 +1116,9 @@ class _StreamIteratorWrapper(ObjectProxy): # type: ignore
         self._request: _ProviderRequest = request

         self._first_token: bool = True
-        self._is_bedrock: bool = request.is_bedrock()
         self._bedrock_from_stream: bool = bedrock_from_stream
+        self._ingested: bool = False
+        self._iter_started: bool = False

     def __enter__(self) -> Any:
         self._instrumentor._logger.debug(f"StreamIteratorWrapper: __enter__")
@@ -1123,7 +1137,8 @@ class _StreamIteratorWrapper(ObjectProxy): # type: ignore
         await self.__wrapped__.__aexit__(exc_type, exc_val, exc_tb) # type: ignore

     def __iter__(self) -> Any:
-
+        self._iter_started = True
+        if self._request.is_bedrock:
             # MUST reside in a separate function so that the yield statement (e.g. the generator) doesn't implicitly return its own iterator and overriding self
             self._instrumentor._logger.debug(f"StreamIteratorWrapper: bedrock __iter__")
             return self._iter_bedrock()
@@ -1134,13 +1149,19 @@ class _StreamIteratorWrapper(ObjectProxy): # type: ignore
     def _iter_bedrock(self) -> Any:
         # botocore EventStream doesn't have a __next__ method so iterate over the wrapped object in place
         for event in self.__wrapped__: # type: ignore
+            result: Optional[_ChunkResult] = None
+
             if (self._bedrock_from_stream):
-                self._evaluate_chunk(event)
+                result = self._evaluate_chunk(event)
             else:
                 chunk = event.get('chunk') # type: ignore
                 if chunk:
                     decode = chunk.get('bytes').decode() # type: ignore
-                    self._evaluate_chunk(decode)
+                    result = self._evaluate_chunk(decode)
+
+            if result and result.ingest:
+                self._stop_iteration()
+
             yield event

         self._instrumentor._logger.debug(f"StreamIteratorWrapper: bedrock iter finished")
@@ -1148,40 +1169,60 @@ class _StreamIteratorWrapper(ObjectProxy): # type: ignore
         self._stop_iteration()

     def __aiter__(self) -> Any:
+        self._iter_started = True
         self._instrumentor._logger.debug(f"StreamIteratorWrapper: __aiter__")
         return self

     def __next__(self) -> object:
         try:
             chunk: object = self.__wrapped__.__next__() # type: ignore
+
+            if self._ingested:
+                self._instrumentor._logger.debug(f"StreamIteratorWrapper: __next__ already ingested, not processing chunk {chunk}")
+                return chunk # type: ignore
+
+            result = self._evaluate_chunk(chunk)
+
+            if result.ingest:
+                self._stop_iteration()
+
+            if result.send_chunk_to_caller:
+                return chunk # type: ignore
+            else:
+                return self.__next__()
         except Exception as e:
             if isinstance(e, StopIteration):
                 self._stop_iteration()
             else:
                 self._instrumentor._logger.debug(f"StreamIteratorWrapper: __next__ exception {e}")
                 raise e
-        else:
-            if self._evaluate_chunk(chunk) == False:
-                return self.__next__()
-
-            return chunk # type: ignore

     async def __anext__(self) -> object:
         try:
             chunk: object = await self.__wrapped__.__anext__() # type: ignore
+
+            if self._ingested:
+                self._instrumentor._logger.debug(f"StreamIteratorWrapper: __next__ already ingested, not processing chunk {chunk}")
+                return chunk # type: ignore
+
+            result = self._evaluate_chunk(chunk)
+
+            if result.ingest:
+                await self._astop_iteration()
+
+            if result.send_chunk_to_caller:
+                return chunk # type: ignore
+            else:
+                return await self.__anext__()
+
         except Exception as e:
             if isinstance(e, StopAsyncIteration):
                 await self._astop_iteration()
             else:
                 self._instrumentor._logger.debug(f"StreamIteratorWrapper: __anext__ exception {e}")
                 raise e
-        else:
-            if self._evaluate_chunk(chunk) == False:
-                return await self.__anext__()
-
-            return chunk # type: ignore

-    def _evaluate_chunk(self, chunk: Any) ->
+    def _evaluate_chunk(self, chunk: Any) -> _ChunkResult:
         if self._first_token:
             self._request._ingest["time_to_first_token_ms"] = self._stopwatch.elapsed_ms_int()
             self._first_token = False
@@ -1192,7 +1233,7 @@ class _StreamIteratorWrapper(ObjectProxy): # type: ignore
         return self._request.process_chunk(chunk)

     def _process_stop_iteration(self) -> None:
-        self._instrumentor._logger.debug(f"StreamIteratorWrapper: stop iteration")
+        self._instrumentor._logger.debug(f"StreamIteratorWrapper: process stop iteration")

         self._stopwatch.stop()
         self._request._ingest["end_to_end_latency_ms"] = self._stopwatch.elapsed_ms_int()
@@ -1202,12 +1243,23 @@ class _StreamIteratorWrapper(ObjectProxy): # type: ignore
             self._request._ingest["provider_response_json"] = self._responses

     async def _astop_iteration(self) -> None:
+        if self._ingested:
+            self._instrumentor._logger.debug(f"StreamIteratorWrapper: astop iteration already ingested, skipping")
+            return
+
         self._process_stop_iteration()
+
         await self._instrumentor._aingest_units(self._request._ingest)
+        self._ingested = True

     def _stop_iteration(self) -> None:
+        if self._ingested:
+            self._instrumentor._logger.debug(f"StreamIteratorWrapper: stop iteration already ingested, skipping")
+            return
+
         self._process_stop_iteration()
         self._instrumentor._ingest_units(self._request._ingest)
+        self._ingested = True

     @staticmethod
     def chunk_to_json(chunk: Any) -> str:
@@ -1241,7 +1293,6 @@ class _StreamManagerWrapper(ObjectProxy): # type: ignore
         self._responses: list[str] = []
         self._request: _ProviderRequest = request
         self._first_token: bool = True
-        self._done: bool = False

     def __enter__(self) -> _StreamIteratorWrapper:
         self._instrumentor._logger.debug(f"_StreamManagerWrapper: __enter__")
@@ -1275,92 +1326,103 @@ class _GeneratorWrapper: # type: ignore
         self._responses: list[str] = []
         self._request: _ProviderRequest = request
         self._first_token: bool = True
-        self.
-
+        self._ingested: bool = False
+        self._iter_started: bool = False
+
     def __iter__(self) -> Any:
+        self._iter_started = True
         self._instrumentor._logger.debug(f"GeneratorWrapper: __iter__")
         return self

     def __aiter__(self) -> Any:
         self._instrumentor._logger.debug(f"GeneratorWrapper: __aiter__")
         return self
-
-    def
-        if self.
-
+
+    def _process_chunk(self, chunk: Any) -> _ChunkResult:
+        if self._first_token:
+            self._request._ingest["time_to_first_token_ms"] = self._stopwatch.elapsed_ms_int()
+            self._first_token = False

+        if self._log_prompt_and_response:
+            dict = self._chunk_to_dict(chunk)
+            self._responses.append(json.dumps(dict))
+
+        return self._request.process_chunk(chunk)
+
+    def __next__(self) -> Any:
         try:
             chunk = next(self._generator)
-
+            result = self._process_chunk(chunk)
+
+            if result.ingest:
+                self._stop_iteration()
+
+            # ignore result.send_chunk_to_caller:
+            return chunk

         except Exception as e:
             if isinstance(e, StopIteration):
-                self.
+                self._stop_iteration()
             else:
                 self._instrumentor._logger.debug(f"GeneratorWrapper: __next__ exception {e}")
                 raise e

     async def __anext__(self) -> Any:
-        if self._done:
-            raise StopAsyncIteration
-
         try:
             chunk = await anext(self._generator) # type: ignore
-
+            result = self._process_chunk(chunk)
+
+            if result.ingest:
+                await self._astop_iteration()
+
+            # ignore result.send_chunk_to_caller:
+            return chunk # type: ignore

         except Exception as e:
             if isinstance(e, StopAsyncIteration):
-                await self.
+                await self._astop_iteration()
             else:
                 self._instrumentor._logger.debug(f"GeneratorWrapper: __anext__ exception {e}")
                 raise e

     @staticmethod
     def _chunk_to_dict(chunk: Any) -> 'dict[str, object]':
-        if hasattr(chunk, "
-            return chunk.
+        if hasattr(chunk, "to_dict"):
+            return chunk.to_dict() # type: ignore
         elif hasattr(chunk, "to_json_dict"):
             return chunk.to_json_dict() # type: ignore
         else:
             return {}

-    def
-        if self.
-            self.
-
-
-        if self._log_prompt_and_response:
-            dict = self._chunk_to_dict(chunk)
-            self._responses.append(json.dumps(dict))
-
-        self._request.process_chunk(chunk)
-        return chunk
+    def _stop_iteration(self) -> None:
+        if self._ingested:
+            self._instrumentor._logger.debug(f"GeneratorWrapper: stop iteration already ingested, skipping")
+            return

-
-        self._instrumentor._logger.debug(f"GeneratorWrapper: stop iteration")
+        self._process_stop_iteration()

-        self._stopwatch.stop()
-        self._request._ingest["end_to_end_latency_ms"] = self._stopwatch.elapsed_ms_int()
-        self._request._ingest["http_status_code"] = 200
-
-        if self._log_prompt_and_response:
-            self._request._ingest["provider_response_json"] = self._responses
-
         self._instrumentor._ingest_units(self._request._ingest)
-        self.
+        self._ingested = True

-    async def
-        self.
+    async def _astop_iteration(self) -> None:
+        if self._ingested:
+            self._instrumentor._logger.debug(f"GeneratorWrapper: astop iteration already ingested, skipping")
+            return
+
+        self._process_stop_iteration()

-        self.
+        await self._instrumentor._aingest_units(self._request._ingest)
+        self._ingested = True
+
+    def _process_stop_iteration(self) -> None:
+        self._instrumentor._logger.debug(f"GeneratorWrapper: stop iteration")
+
+        self._stopwatch.stop()
         self._request._ingest["end_to_end_latency_ms"] = self._stopwatch.elapsed_ms_int()
         self._request._ingest["http_status_code"] = 200

         if self._log_prompt_and_response:
             self._request._ingest["provider_response_json"] = self._responses
-
-        await self._instrumentor._aingest_units(self._request._ingest)
-        self._done = True

 global _instrumentor
 _instrumentor: Optional[_PayiInstrumentor] = None
@@ -1630,4 +1692,4 @@ def proxy(

        return _proxy_wrapper

-    return _proxy
+    return _proxy
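The practical effect of the instrument.py changes above is that `process_chunk` and `_evaluate_chunk` now return a small `_ChunkResult` dataclass instead of a bare boolean, so the stream and generator wrappers can both drop a chunk (`send_chunk_to_caller=False`) and trigger an early ingest (`ingest=True`) without waiting for `StopIteration`. A minimal sketch of that contract follows; the `_DemoProviderRequest` class and its "usage_summary" terminal-chunk marker are illustrative assumptions, not code from the package.

from dataclasses import dataclass
from typing import Any


@dataclass
class _ChunkResult:
    # Mirrors the dataclass added to payi/lib/instrument.py in this release.
    send_chunk_to_caller: bool  # False: the wrapper swallows the chunk and fetches the next one
    ingest: bool = False        # True: the wrapper ingests units immediately instead of waiting for StopIteration


class _DemoProviderRequest:
    """Stand-in for a _ProviderRequest subclass; for illustration only."""

    def process_chunk(self, chunk: Any) -> _ChunkResult:
        # Hypothetical terminal marker: once usage is known, ingest now and hide the chunk.
        if isinstance(chunk, dict) and chunk.get("type") == "usage_summary":
            return _ChunkResult(send_chunk_to_caller=False, ingest=True)
        # Default behavior from the diff: pass every chunk through unchanged.
        return _ChunkResult(send_chunk_to_caller=True)


if __name__ == "__main__":
    request = _DemoProviderRequest()
    print(request.process_chunk({"type": "content_delta"}))  # send_chunk_to_caller=True, ingest=False
    print(request.process_chunk({"type": "usage_summary"}))  # send_chunk_to_caller=False, ingest=True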
payi/resources/categories/__init__.py
CHANGED
@@ -16,6 +16,14 @@ from .categories import (
     CategoriesResourceWithStreamingResponse,
     AsyncCategoriesResourceWithStreamingResponse,
 )
+from .fixed_cost_resources import (
+    FixedCostResourcesResource,
+    AsyncFixedCostResourcesResource,
+    FixedCostResourcesResourceWithRawResponse,
+    AsyncFixedCostResourcesResourceWithRawResponse,
+    FixedCostResourcesResourceWithStreamingResponse,
+    AsyncFixedCostResourcesResourceWithStreamingResponse,
+)

 __all__ = [
     "ResourcesResource",
@@ -24,6 +32,12 @@ __all__ = [
     "AsyncResourcesResourceWithRawResponse",
     "ResourcesResourceWithStreamingResponse",
     "AsyncResourcesResourceWithStreamingResponse",
+    "FixedCostResourcesResource",
+    "AsyncFixedCostResourcesResource",
+    "FixedCostResourcesResourceWithRawResponse",
+    "AsyncFixedCostResourcesResourceWithRawResponse",
+    "FixedCostResourcesResourceWithStreamingResponse",
+    "AsyncFixedCostResourcesResourceWithStreamingResponse",
     "CategoriesResource",
     "AsyncCategoriesResource",
     "CategoriesResourceWithRawResponse",
payi/resources/categories/categories.py
CHANGED
@@ -25,6 +25,14 @@ from ..._response import (
 )
 from ...pagination import SyncCursorPage, AsyncCursorPage
 from ..._base_client import AsyncPaginator, make_request_options
+from .fixed_cost_resources import (
+    FixedCostResourcesResource,
+    AsyncFixedCostResourcesResource,
+    FixedCostResourcesResourceWithRawResponse,
+    AsyncFixedCostResourcesResourceWithRawResponse,
+    FixedCostResourcesResourceWithStreamingResponse,
+    AsyncFixedCostResourcesResourceWithStreamingResponse,
+)
 from ...types.category_response import CategoryResponse
 from ...types.category_delete_response import CategoryDeleteResponse
 from ...types.category_resource_response import CategoryResourceResponse
@@ -38,6 +46,10 @@ class CategoriesResource(SyncAPIResource):
     def resources(self) -> ResourcesResource:
         return ResourcesResource(self._client)

+    @cached_property
+    def fixed_cost_resources(self) -> FixedCostResourcesResource:
+        return FixedCostResourcesResource(self._client)
+
     @cached_property
     def with_raw_response(self) -> CategoriesResourceWithRawResponse:
         """
@@ -225,6 +237,10 @@ class AsyncCategoriesResource(AsyncAPIResource):
     def resources(self) -> AsyncResourcesResource:
         return AsyncResourcesResource(self._client)

+    @cached_property
+    def fixed_cost_resources(self) -> AsyncFixedCostResourcesResource:
+        return AsyncFixedCostResourcesResource(self._client)
+
     @cached_property
     def with_raw_response(self) -> AsyncCategoriesResourceWithRawResponse:
         """
@@ -428,6 +444,10 @@ class CategoriesResourceWithRawResponse:
     def resources(self) -> ResourcesResourceWithRawResponse:
         return ResourcesResourceWithRawResponse(self._categories.resources)

+    @cached_property
+    def fixed_cost_resources(self) -> FixedCostResourcesResourceWithRawResponse:
+        return FixedCostResourcesResourceWithRawResponse(self._categories.fixed_cost_resources)
+

 class AsyncCategoriesResourceWithRawResponse:
     def __init__(self, categories: AsyncCategoriesResource) -> None:
@@ -450,6 +470,10 @@ class AsyncCategoriesResourceWithRawResponse:
     def resources(self) -> AsyncResourcesResourceWithRawResponse:
         return AsyncResourcesResourceWithRawResponse(self._categories.resources)

+    @cached_property
+    def fixed_cost_resources(self) -> AsyncFixedCostResourcesResourceWithRawResponse:
+        return AsyncFixedCostResourcesResourceWithRawResponse(self._categories.fixed_cost_resources)
+

 class CategoriesResourceWithStreamingResponse:
     def __init__(self, categories: CategoriesResource) -> None:
@@ -472,6 +496,10 @@ class CategoriesResourceWithStreamingResponse:
     def resources(self) -> ResourcesResourceWithStreamingResponse:
         return ResourcesResourceWithStreamingResponse(self._categories.resources)

+    @cached_property
+    def fixed_cost_resources(self) -> FixedCostResourcesResourceWithStreamingResponse:
+        return FixedCostResourcesResourceWithStreamingResponse(self._categories.fixed_cost_resources)
+

 class AsyncCategoriesResourceWithStreamingResponse:
     def __init__(self, categories: AsyncCategoriesResource) -> None:
@@ -493,3 +521,7 @@ class AsyncCategoriesResourceWithStreamingResponse:
     @cached_property
     def resources(self) -> AsyncResourcesResourceWithStreamingResponse:
         return AsyncResourcesResourceWithStreamingResponse(self._categories.resources)
+
+    @cached_property
+    def fixed_cost_resources(self) -> AsyncFixedCostResourcesResourceWithStreamingResponse:
+        return AsyncFixedCostResourcesResourceWithStreamingResponse(self._categories.fixed_cost_resources)
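The categories diffs above add a fixed_cost_resources sub-resource alongside the existing resources sub-resource on both the sync and async category clients. A rough sketch of how it would be reached from a client instance, assuming the package's usual Payi client class; the commented-out create(...) call is only implied by the new fixed_cost_resource_create_params.py type and is not shown in this diff.

from payi import Payi  # assumes the standard synchronous client class

client = Payi()

# New in 0.1.0a84: fixed_cost_resources sits next to client.categories.resources.
fixed_cost = client.categories.fixed_cost_resources

# A create() method is implied by payi/types/categories/fixed_cost_resource_create_params.py,
# but its exact signature is not visible in this diff, so the call below is illustrative only:
# fixed_cost.create(...)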