payi 0.1.0a39__py3-none-any.whl → 0.1.0a41__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of payi might be problematic. Click here for more details.
- payi/_constants.py +1 -1
- payi/_models.py +1 -1
- payi/_version.py +1 -1
- payi/lib/AnthropicInstrumentor.py +54 -13
- payi/lib/BedrockInstrumentor.py +288 -0
- payi/lib/Instruments.py +1 -0
- payi/lib/OpenAIInstrumentor.py +50 -11
- payi/lib/instrument.py +126 -18
- {payi-0.1.0a39.dist-info → payi-0.1.0a41.dist-info}/METADATA +2 -1
- {payi-0.1.0a39.dist-info → payi-0.1.0a41.dist-info}/RECORD +12 -11
- {payi-0.1.0a39.dist-info → payi-0.1.0a41.dist-info}/WHEEL +0 -0
- {payi-0.1.0a39.dist-info → payi-0.1.0a41.dist-info}/licenses/LICENSE +0 -0
payi/_constants.py
CHANGED
|
@@ -6,7 +6,7 @@ RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response"
|
|
|
6
6
|
OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to"
|
|
7
7
|
|
|
8
8
|
# default timeout is 1 minute
|
|
9
|
-
DEFAULT_TIMEOUT = httpx.Timeout(timeout=60
|
|
9
|
+
DEFAULT_TIMEOUT = httpx.Timeout(timeout=60, connect=5.0)
|
|
10
10
|
DEFAULT_MAX_RETRIES = 2
|
|
11
11
|
DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20)
|
|
12
12
|
|
payi/_models.py
CHANGED
|
@@ -172,7 +172,7 @@ class BaseModel(pydantic.BaseModel):
|
|
|
172
172
|
@override
|
|
173
173
|
def __str__(self) -> str:
|
|
174
174
|
# mypy complains about an invalid self arg
|
|
175
|
-
return f
|
|
175
|
+
return f"{self.__repr_name__()}({self.__repr_str__(', ')})" # type: ignore[misc]
|
|
176
176
|
|
|
177
177
|
# Override the 'construct' method in a way that supports recursive parsing without validation.
|
|
178
178
|
# Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836.
|
payi/_version.py
CHANGED
|
@@ -1,12 +1,13 @@
|
|
|
1
1
|
import logging
|
|
2
|
-
from typing import Any
|
|
2
|
+
from typing import Any, Union
|
|
3
3
|
|
|
4
|
+
import tiktoken
|
|
4
5
|
from wrapt import wrap_function_wrapper # type: ignore
|
|
5
6
|
|
|
6
7
|
from payi.types import IngestUnitsParams
|
|
7
8
|
from payi.types.ingest_units_params import Units
|
|
8
9
|
|
|
9
|
-
from .instrument import PayiInstrumentor
|
|
10
|
+
from .instrument import IsStreaming, PayiInstrumentor
|
|
10
11
|
|
|
11
12
|
|
|
12
13
|
class AnthropicIntrumentor:
|
|
@@ -47,13 +48,15 @@ def chat_wrapper(
|
|
|
47
48
|
kwargs: Any,
|
|
48
49
|
) -> Any:
|
|
49
50
|
return instrumentor.chat_wrapper(
|
|
50
|
-
"system.anthropic",
|
|
51
|
-
process_chunk,
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
51
|
+
category="system.anthropic",
|
|
52
|
+
process_chunk=process_chunk,
|
|
53
|
+
process_request=process_request,
|
|
54
|
+
process_synchronous_response=process_synchronous_response,
|
|
55
|
+
is_streaming=IsStreaming.kwargs,
|
|
56
|
+
wrapped=wrapped,
|
|
57
|
+
instance=instance,
|
|
58
|
+
args=args,
|
|
59
|
+
kwargs=kwargs,
|
|
57
60
|
)
|
|
58
61
|
|
|
59
62
|
|
|
@@ -62,7 +65,9 @@ def process_chunk(chunk: Any, ingest: IngestUnitsParams) -> None:
|
|
|
62
65
|
usage = chunk.message.usage
|
|
63
66
|
units = ingest["units"]
|
|
64
67
|
|
|
65
|
-
|
|
68
|
+
input = PayiInstrumentor.update_for_vision(usage.input_tokens, units)
|
|
69
|
+
|
|
70
|
+
units["text"] = Units(input=input, output=0)
|
|
66
71
|
|
|
67
72
|
if hasattr(usage, "cache_creation_input_tokens") and usage.cache_creation_input_tokens > 0:
|
|
68
73
|
text_cache_write = usage.cache_creation_input_tokens
|
|
@@ -77,10 +82,10 @@ def process_chunk(chunk: Any, ingest: IngestUnitsParams) -> None:
|
|
|
77
82
|
ingest["units"]["text"]["output"] = usage.output_tokens
|
|
78
83
|
|
|
79
84
|
|
|
80
|
-
def process_synchronous_response(response: Any, ingest: IngestUnitsParams, log_prompt_and_response: bool) ->
|
|
85
|
+
def process_synchronous_response(response: Any, ingest: IngestUnitsParams, log_prompt_and_response: bool, *args: Any, **kwargs: 'dict[str, Any]') -> Any: # noqa: ARG001
|
|
81
86
|
usage = response.usage
|
|
82
87
|
input = usage.input_tokens
|
|
83
|
-
|
|
88
|
+
output = usage.output_tokens
|
|
84
89
|
units: dict[str, Units] = ingest["units"]
|
|
85
90
|
|
|
86
91
|
if hasattr(usage, "cache_creation_input_tokens") and usage.cache_creation_input_tokens > 0:
|
|
@@ -91,7 +96,43 @@ def process_synchronous_response(response: Any, ingest: IngestUnitsParams, log_p
|
|
|
91
96
|
text_cache_read = usage.cache_read_input_tokens
|
|
92
97
|
units["text_cache_read"] = Units(input=text_cache_read, output=0)
|
|
93
98
|
|
|
94
|
-
|
|
99
|
+
input = PayiInstrumentor.update_for_vision(input, units)
|
|
100
|
+
|
|
101
|
+
units["text"] = Units(input=input, output=output)
|
|
95
102
|
|
|
96
103
|
if log_prompt_and_response:
|
|
97
104
|
ingest["provider_response_json"] = response.to_json()
|
|
105
|
+
|
|
106
|
+
return None
|
|
107
|
+
|
|
108
|
+
def has_image_and_get_texts(encoding: tiktoken.Encoding, content: Union[str, 'list[Any]']) -> 'tuple[bool, int]':
|
|
109
|
+
if isinstance(content, str):
|
|
110
|
+
return False, 0
|
|
111
|
+
elif isinstance(content, list): # type: ignore
|
|
112
|
+
has_image = any(item.get("type") == "image" for item in content)
|
|
113
|
+
if has_image is False:
|
|
114
|
+
return has_image, 0
|
|
115
|
+
|
|
116
|
+
token_count = sum(len(encoding.encode(item.get("text", ""))) for item in content if item.get("type") == "text")
|
|
117
|
+
return has_image, token_count
|
|
118
|
+
|
|
119
|
+
def process_request(ingest: IngestUnitsParams, kwargs: Any) -> None:
|
|
120
|
+
messages = kwargs.get("messages")
|
|
121
|
+
if not messages or len(messages) == 0:
|
|
122
|
+
return
|
|
123
|
+
|
|
124
|
+
estimated_token_count = 0
|
|
125
|
+
has_image = False
|
|
126
|
+
|
|
127
|
+
enc = tiktoken.get_encoding("cl100k_base")
|
|
128
|
+
|
|
129
|
+
for message in messages:
|
|
130
|
+
msg_has_image, msg_prompt_tokens = has_image_and_get_texts(enc, message.get('content', ''))
|
|
131
|
+
if msg_has_image:
|
|
132
|
+
has_image = True
|
|
133
|
+
estimated_token_count += msg_prompt_tokens
|
|
134
|
+
|
|
135
|
+
if not has_image or estimated_token_count == 0:
|
|
136
|
+
return
|
|
137
|
+
|
|
138
|
+
ingest["units"][PayiInstrumentor.estimated_prompt_tokens] = Units(input=estimated_token_count, output=0)
|
|
@@ -0,0 +1,288 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
from typing import Any
|
|
4
|
+
from functools import wraps
|
|
5
|
+
|
|
6
|
+
from wrapt import ObjectProxy, wrap_function_wrapper # type: ignore
|
|
7
|
+
|
|
8
|
+
from payi.types.ingest_units_params import Units, IngestUnitsParams
|
|
9
|
+
from payi.types.pay_i_common_models_api_router_header_info_param import PayICommonModelsAPIRouterHeaderInfoParam
|
|
10
|
+
|
|
11
|
+
from .instrument import IsStreaming, PayiInstrumentor
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class BedrockInstrumentor:
|
|
15
|
+
@staticmethod
|
|
16
|
+
def instrument(instrumentor: PayiInstrumentor) -> None:
|
|
17
|
+
try:
|
|
18
|
+
import boto3 # type: ignore # noqa: F401 I001
|
|
19
|
+
|
|
20
|
+
# wrap_function_wrapper(
|
|
21
|
+
# "anthropic.resources.completions",
|
|
22
|
+
# "Completions.create",
|
|
23
|
+
# chat_wrapper(instrumentor),
|
|
24
|
+
# )
|
|
25
|
+
|
|
26
|
+
wrap_function_wrapper(
|
|
27
|
+
"botocore.client",
|
|
28
|
+
"ClientCreator.create_client",
|
|
29
|
+
create_client_wrapper(instrumentor),
|
|
30
|
+
)
|
|
31
|
+
|
|
32
|
+
wrap_function_wrapper(
|
|
33
|
+
"botocore.session",
|
|
34
|
+
"Session.create_client",
|
|
35
|
+
create_client_wrapper(instrumentor),
|
|
36
|
+
)
|
|
37
|
+
|
|
38
|
+
except Exception as e:
|
|
39
|
+
logging.debug(f"Error instrumenting bedrock: {e}")
|
|
40
|
+
return
|
|
41
|
+
|
|
42
|
+
@PayiInstrumentor.payi_wrapper
|
|
43
|
+
def create_client_wrapper(instrumentor: PayiInstrumentor, wrapped: Any, instance: Any, args: Any, kwargs: Any) -> Any: # noqa: ARG001
|
|
44
|
+
if kwargs.get("service_name") != "bedrock-runtime":
|
|
45
|
+
return wrapped(*args, **kwargs)
|
|
46
|
+
|
|
47
|
+
try:
|
|
48
|
+
client: Any = wrapped(*args, **kwargs)
|
|
49
|
+
client.invoke_model = wrap_invoke(instrumentor, client.invoke_model)
|
|
50
|
+
client.invoke_model_with_response_stream = wrap_invoke_stream(instrumentor, client.invoke_model_with_response_stream)
|
|
51
|
+
client.converse = wrap_converse(instrumentor, client.converse)
|
|
52
|
+
client.converse_stream = wrap_converse_stream(instrumentor, client.converse_stream)
|
|
53
|
+
|
|
54
|
+
return client
|
|
55
|
+
except Exception as e:
|
|
56
|
+
logging.debug(f"Error instrumenting bedrock client: {e}")
|
|
57
|
+
|
|
58
|
+
return wrapped(*args, **kwargs)
|
|
59
|
+
|
|
60
|
+
class InvokeResponseWrapper(ObjectProxy): # type: ignore
|
|
61
|
+
def __init__(
|
|
62
|
+
self,
|
|
63
|
+
response: Any,
|
|
64
|
+
instrumentor: PayiInstrumentor,
|
|
65
|
+
ingest: IngestUnitsParams,
|
|
66
|
+
log_prompt_and_response: bool
|
|
67
|
+
) -> None:
|
|
68
|
+
|
|
69
|
+
super().__init__(response) # type: ignore
|
|
70
|
+
self._response = response
|
|
71
|
+
self._instrumentor = instrumentor
|
|
72
|
+
self._ingest = ingest
|
|
73
|
+
self._log_prompt_and_response = log_prompt_and_response
|
|
74
|
+
|
|
75
|
+
def read(self, amt: Any =None): # type: ignore
|
|
76
|
+
# data is array of bytes
|
|
77
|
+
data: Any = self.__wrapped__.read(amt) # type: ignore
|
|
78
|
+
response = json.loads(data)
|
|
79
|
+
|
|
80
|
+
resource = self._ingest["resource"]
|
|
81
|
+
if not resource:
|
|
82
|
+
return
|
|
83
|
+
|
|
84
|
+
input: int = 0
|
|
85
|
+
output: int = 0
|
|
86
|
+
units: dict[str, Units] = self._ingest["units"]
|
|
87
|
+
|
|
88
|
+
if resource.startswith("meta.llama3"):
|
|
89
|
+
input = response['prompt_token_count']
|
|
90
|
+
output = response['generation_token_count']
|
|
91
|
+
elif resource.startswith("anthropic."):
|
|
92
|
+
usage = response['usage']
|
|
93
|
+
input = usage['input_tokens']
|
|
94
|
+
output = usage['output_tokens']
|
|
95
|
+
units["text"] = Units(input=input, output=output)
|
|
96
|
+
|
|
97
|
+
if self._log_prompt_and_response:
|
|
98
|
+
self._ingest["provider_response_json"] = data.decode('utf-8')
|
|
99
|
+
|
|
100
|
+
self._instrumentor._ingest_units(self._ingest)
|
|
101
|
+
|
|
102
|
+
return data
|
|
103
|
+
|
|
104
|
+
def wrap_invoke(instrumentor: PayiInstrumentor, wrapped: Any) -> Any:
|
|
105
|
+
@wraps(wrapped)
|
|
106
|
+
def invoke_wrapper(*args: Any, **kwargs: 'dict[str, Any]') -> Any:
|
|
107
|
+
modelId:str = kwargs.get("modelId", "") # type: ignore
|
|
108
|
+
|
|
109
|
+
if modelId.startswith("meta.llama3") or modelId.startswith("anthropic."):
|
|
110
|
+
return instrumentor.chat_wrapper(
|
|
111
|
+
category="system.aws.bedrock",
|
|
112
|
+
process_chunk=None,
|
|
113
|
+
process_request=process_invoke_request,
|
|
114
|
+
process_synchronous_response=process_synchronous_invoke_response,
|
|
115
|
+
is_streaming=IsStreaming.false,
|
|
116
|
+
wrapped=wrapped,
|
|
117
|
+
instance=None,
|
|
118
|
+
args=args,
|
|
119
|
+
kwargs=kwargs,
|
|
120
|
+
)
|
|
121
|
+
return wrapped(*args, **kwargs)
|
|
122
|
+
|
|
123
|
+
return invoke_wrapper
|
|
124
|
+
|
|
125
|
+
def wrap_invoke_stream(instrumentor: PayiInstrumentor, wrapped: Any) -> Any:
|
|
126
|
+
@wraps(wrapped)
|
|
127
|
+
def invoke_wrapper(*args: Any, **kwargs: Any) -> Any:
|
|
128
|
+
modelId:str = kwargs.get("modelId", "") # type: ignore
|
|
129
|
+
|
|
130
|
+
if modelId.startswith("meta.llama3") or modelId.startswith("anthropic."):
|
|
131
|
+
return instrumentor.chat_wrapper(
|
|
132
|
+
category="system.aws.bedrock",
|
|
133
|
+
process_chunk=process_invoke_streaming_anthropic_chunk if modelId.startswith("anthropic.") else process_invoke_streaming_llama_chunk,
|
|
134
|
+
process_request=process_invoke_request,
|
|
135
|
+
process_synchronous_response=None,
|
|
136
|
+
is_streaming=IsStreaming.true,
|
|
137
|
+
wrapped=wrapped,
|
|
138
|
+
instance=None,
|
|
139
|
+
args=args,
|
|
140
|
+
kwargs=kwargs,
|
|
141
|
+
)
|
|
142
|
+
return wrapped(*args, **kwargs)
|
|
143
|
+
|
|
144
|
+
return invoke_wrapper
|
|
145
|
+
|
|
146
|
+
def wrap_converse(instrumentor: PayiInstrumentor, wrapped: Any) -> Any:
|
|
147
|
+
@wraps(wrapped)
|
|
148
|
+
def invoke_wrapper(*args: Any, **kwargs: 'dict[str, Any]') -> Any:
|
|
149
|
+
modelId:str = kwargs.get("modelId", "") # type: ignore
|
|
150
|
+
|
|
151
|
+
if modelId.startswith("meta.llama3") or modelId.startswith("anthropic."):
|
|
152
|
+
return instrumentor.chat_wrapper(
|
|
153
|
+
category="system.aws.bedrock",
|
|
154
|
+
process_chunk=None,
|
|
155
|
+
process_request=process_converse_request,
|
|
156
|
+
process_synchronous_response=process_synchronous_converse_response,
|
|
157
|
+
is_streaming=IsStreaming.false,
|
|
158
|
+
wrapped=wrapped,
|
|
159
|
+
instance=None,
|
|
160
|
+
args=args,
|
|
161
|
+
kwargs=kwargs,
|
|
162
|
+
)
|
|
163
|
+
return wrapped(*args, **kwargs)
|
|
164
|
+
|
|
165
|
+
return invoke_wrapper
|
|
166
|
+
|
|
167
|
+
def wrap_converse_stream(instrumentor: PayiInstrumentor, wrapped: Any) -> Any:
|
|
168
|
+
@wraps(wrapped)
|
|
169
|
+
def invoke_wrapper(*args: Any, **kwargs: Any) -> Any:
|
|
170
|
+
modelId:str = kwargs.get("modelId", "") # type: ignore
|
|
171
|
+
|
|
172
|
+
if modelId.startswith("meta.llama3") or modelId.startswith("anthropic."):
|
|
173
|
+
return instrumentor.chat_wrapper(
|
|
174
|
+
category="system.aws.bedrock",
|
|
175
|
+
process_chunk=process_converse_streaming_chunk,
|
|
176
|
+
process_request=process_converse_request,
|
|
177
|
+
process_synchronous_response=None,
|
|
178
|
+
is_streaming=IsStreaming.true,
|
|
179
|
+
wrapped=wrapped,
|
|
180
|
+
instance=None,
|
|
181
|
+
args=args,
|
|
182
|
+
kwargs=kwargs,
|
|
183
|
+
)
|
|
184
|
+
return wrapped(*args, **kwargs)
|
|
185
|
+
|
|
186
|
+
return invoke_wrapper
|
|
187
|
+
|
|
188
|
+
def process_invoke_streaming_anthropic_chunk(chunk: str, ingest: IngestUnitsParams) -> None:
|
|
189
|
+
chunk_dict = json.loads(chunk)
|
|
190
|
+
type = chunk_dict.get("type", "")
|
|
191
|
+
|
|
192
|
+
if type == "message_start":
|
|
193
|
+
usage = chunk_dict['message']['usage']
|
|
194
|
+
units = ingest["units"]
|
|
195
|
+
|
|
196
|
+
input = PayiInstrumentor.update_for_vision(usage['input_tokens'], units)
|
|
197
|
+
|
|
198
|
+
units["text"] = Units(input=input, output=0)
|
|
199
|
+
|
|
200
|
+
text_cache_write: int = usage.get("cache_creation_input_tokens", 0)
|
|
201
|
+
if text_cache_write > 0:
|
|
202
|
+
units["text_cache_write"] = Units(input=text_cache_write, output=0)
|
|
203
|
+
|
|
204
|
+
text_cache_read: int = usage.get("cache_read_input_tokens", 0)
|
|
205
|
+
if text_cache_read > 0:
|
|
206
|
+
units["text_cache_read"] = Units(input=text_cache_read, output=0)
|
|
207
|
+
|
|
208
|
+
elif type == "message_delta":
|
|
209
|
+
usage = chunk_dict['usage']
|
|
210
|
+
ingest["units"]["text"]["output"] = usage['output_tokens']
|
|
211
|
+
|
|
212
|
+
def process_invoke_streaming_llama_chunk(chunk: str, ingest: IngestUnitsParams) -> None:
|
|
213
|
+
chunk_dict = json.loads(chunk)
|
|
214
|
+
metrics = chunk_dict.get("amazon-bedrock-invocationMetrics", {})
|
|
215
|
+
if metrics:
|
|
216
|
+
input = metrics.get("inputTokenCount", 0)
|
|
217
|
+
output = metrics.get("outputTokenCount", 0)
|
|
218
|
+
ingest["units"]["text"] = Units(input=input, output=output)
|
|
219
|
+
|
|
220
|
+
def process_synchronous_invoke_response(
|
|
221
|
+
response: Any,
|
|
222
|
+
ingest: IngestUnitsParams,
|
|
223
|
+
log_prompt_and_response: bool,
|
|
224
|
+
instrumentor: PayiInstrumentor,
|
|
225
|
+
**kargs: Any) -> Any: # noqa: ARG001
|
|
226
|
+
|
|
227
|
+
metadata = response.get("ResponseMetadata", {})
|
|
228
|
+
|
|
229
|
+
# request_id = metadata.get("RequestId", "")
|
|
230
|
+
# if request_id:
|
|
231
|
+
# ingest["provider_request_id"] = request_id
|
|
232
|
+
|
|
233
|
+
response_headers = metadata.get("HTTPHeaders", {}).copy()
|
|
234
|
+
if response_headers:
|
|
235
|
+
ingest["provider_response_headers"] = [PayICommonModelsAPIRouterHeaderInfoParam(name=k, value=v) for k, v in response_headers.items()]
|
|
236
|
+
|
|
237
|
+
response["body"] = InvokeResponseWrapper(
|
|
238
|
+
response=response["body"],
|
|
239
|
+
instrumentor=instrumentor,
|
|
240
|
+
ingest=ingest,
|
|
241
|
+
log_prompt_and_response=log_prompt_and_response)
|
|
242
|
+
|
|
243
|
+
return response
|
|
244
|
+
|
|
245
|
+
def process_invoke_request(ingest: IngestUnitsParams, kwargs: Any) -> None: # noqa: ARG001
|
|
246
|
+
return
|
|
247
|
+
|
|
248
|
+
def process_converse_streaming_chunk(chunk: 'dict[str, Any]', ingest: IngestUnitsParams) -> None:
|
|
249
|
+
metadata = chunk.get("metadata", {})
|
|
250
|
+
|
|
251
|
+
if metadata:
|
|
252
|
+
usage = metadata['usage']
|
|
253
|
+
input = usage["inputTokens"]
|
|
254
|
+
output = usage["outputTokens"]
|
|
255
|
+
ingest["units"]["text"] = Units(input=input, output=output)
|
|
256
|
+
|
|
257
|
+
def process_synchronous_converse_response(
|
|
258
|
+
response: 'dict[str, Any]',
|
|
259
|
+
ingest: IngestUnitsParams,
|
|
260
|
+
log_prompt_and_response: bool,
|
|
261
|
+
**kargs: Any) -> Any: # noqa: ARG001
|
|
262
|
+
|
|
263
|
+
usage = response["usage"]
|
|
264
|
+
input = usage["inputTokens"]
|
|
265
|
+
output = usage["outputTokens"]
|
|
266
|
+
|
|
267
|
+
units: dict[str, Units] = ingest["units"]
|
|
268
|
+
units["text"] = Units(input=input, output=output)
|
|
269
|
+
|
|
270
|
+
metadata = response.get("ResponseMetadata", {})
|
|
271
|
+
|
|
272
|
+
# request_id = metadata.get("RequestId", "")
|
|
273
|
+
# if request_id:
|
|
274
|
+
# ingest["provider_request_id"] = request_id
|
|
275
|
+
|
|
276
|
+
response_headers = metadata.get("HTTPHeaders", {})
|
|
277
|
+
if response_headers:
|
|
278
|
+
ingest["provider_response_headers"] = [PayICommonModelsAPIRouterHeaderInfoParam(name=k, value=v) for k, v in response_headers.items()]
|
|
279
|
+
|
|
280
|
+
if log_prompt_and_response:
|
|
281
|
+
response_without_metadata = response.copy()
|
|
282
|
+
response_without_metadata.pop("ResponseMetadata", None)
|
|
283
|
+
ingest["provider_response_json"] = json.dumps(response_without_metadata)
|
|
284
|
+
|
|
285
|
+
return None
|
|
286
|
+
|
|
287
|
+
def process_converse_request(ingest: IngestUnitsParams, kwargs: Any) -> None: # noqa: ARG001
|
|
288
|
+
return
|
payi/lib/Instruments.py
CHANGED
payi/lib/OpenAIInstrumentor.py
CHANGED
|
@@ -1,14 +1,15 @@
|
|
|
1
1
|
import json
|
|
2
2
|
import logging
|
|
3
|
-
from typing import Any
|
|
3
|
+
from typing import Any, Union
|
|
4
4
|
from importlib.metadata import version
|
|
5
5
|
|
|
6
|
+
import tiktoken
|
|
6
7
|
from wrapt import wrap_function_wrapper # type: ignore
|
|
7
8
|
|
|
8
9
|
from payi.types import IngestUnitsParams
|
|
9
10
|
from payi.types.ingest_units_params import Units
|
|
10
11
|
|
|
11
|
-
from .instrument import PayiInstrumentor
|
|
12
|
+
from .instrument import IsStreaming, PayiInstrumentor
|
|
12
13
|
|
|
13
14
|
|
|
14
15
|
class OpenAiInstrumentor:
|
|
@@ -36,17 +37,19 @@ def chat_wrapper(
|
|
|
36
37
|
kwargs: Any,
|
|
37
38
|
) -> Any:
|
|
38
39
|
return instrumentor.chat_wrapper(
|
|
39
|
-
"system.openai",
|
|
40
|
-
process_chat_chunk,
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
40
|
+
category="system.openai",
|
|
41
|
+
process_chunk=process_chat_chunk,
|
|
42
|
+
process_request=process_request,
|
|
43
|
+
process_synchronous_response=process_chat_synchronous_response,
|
|
44
|
+
is_streaming=IsStreaming.kwargs,
|
|
45
|
+
wrapped=wrapped,
|
|
46
|
+
instance=instance,
|
|
47
|
+
args=args,
|
|
48
|
+
kwargs=kwargs,
|
|
46
49
|
)
|
|
47
50
|
|
|
48
51
|
|
|
49
|
-
def process_chat_synchronous_response(response: str, ingest: IngestUnitsParams, log_prompt_and_response: bool) ->
|
|
52
|
+
def process_chat_synchronous_response(response: str, ingest: IngestUnitsParams, log_prompt_and_response: bool, **kwargs: Any) -> Any: # noqa: ARG001
|
|
50
53
|
response_dict = model_to_dict(response)
|
|
51
54
|
|
|
52
55
|
add_usage_units(response_dict["usage"], ingest["units"])
|
|
@@ -54,6 +57,7 @@ def process_chat_synchronous_response(response: str, ingest: IngestUnitsParams,
|
|
|
54
57
|
if log_prompt_and_response:
|
|
55
58
|
ingest["provider_response_json"] = [json.dumps(response_dict)]
|
|
56
59
|
|
|
60
|
+
return None
|
|
57
61
|
|
|
58
62
|
def process_chat_chunk(chunk: Any, ingest: IngestUnitsParams) -> None:
|
|
59
63
|
model = model_to_dict(chunk)
|
|
@@ -84,6 +88,41 @@ def add_usage_units(usage: "dict[str, Any]", units: "dict[str, Units]") -> None:
|
|
|
84
88
|
if input_cache != 0:
|
|
85
89
|
units["text_cache_read"] = Units(input=input_cache, output=0)
|
|
86
90
|
|
|
87
|
-
input
|
|
91
|
+
input = PayiInstrumentor.update_for_vision(input - input_cache, units)
|
|
88
92
|
|
|
89
93
|
units["text"] = Units(input=input, output=output)
|
|
94
|
+
|
|
95
|
+
def has_image_and_get_texts(encoding: tiktoken.Encoding, content: Union[str, 'list[Any]']) -> 'tuple[bool, int]':
|
|
96
|
+
if isinstance(content, str):
|
|
97
|
+
return False, 0
|
|
98
|
+
elif isinstance(content, list): # type: ignore
|
|
99
|
+
has_image = any(item.get("type") == "image_url" for item in content)
|
|
100
|
+
if has_image is False:
|
|
101
|
+
return has_image, 0
|
|
102
|
+
|
|
103
|
+
token_count = sum(len(encoding.encode(item.get("text", ""))) for item in content if item.get("type") == "text")
|
|
104
|
+
return has_image, token_count
|
|
105
|
+
|
|
106
|
+
def process_request(ingest: IngestUnitsParams, kwargs: Any) -> None:
|
|
107
|
+
messages = kwargs.get("messages")
|
|
108
|
+
if not messages or len(messages) == 0:
|
|
109
|
+
return
|
|
110
|
+
|
|
111
|
+
estimated_token_count = 0
|
|
112
|
+
has_image = False
|
|
113
|
+
|
|
114
|
+
try:
|
|
115
|
+
enc = tiktoken.encoding_for_model(kwargs.get("model"))
|
|
116
|
+
except KeyError:
|
|
117
|
+
enc = tiktoken.get_encoding("o200k_base")
|
|
118
|
+
|
|
119
|
+
for message in messages:
|
|
120
|
+
msg_has_image, msg_prompt_tokens = has_image_and_get_texts(enc, message.get('content', ''))
|
|
121
|
+
if msg_has_image:
|
|
122
|
+
has_image = True
|
|
123
|
+
estimated_token_count += msg_prompt_tokens
|
|
124
|
+
|
|
125
|
+
if not has_image or estimated_token_count == 0:
|
|
126
|
+
return
|
|
127
|
+
|
|
128
|
+
ingest["units"][PayiInstrumentor.estimated_prompt_tokens] = Units(input=estimated_token_count, output=0)
|
payi/lib/instrument.py
CHANGED
|
@@ -4,18 +4,28 @@ import asyncio
|
|
|
4
4
|
import inspect
|
|
5
5
|
import logging
|
|
6
6
|
import traceback
|
|
7
|
+
from enum import Enum
|
|
7
8
|
from typing import Any, Set, Union, Callable, Optional
|
|
8
9
|
|
|
9
10
|
from wrapt import ObjectProxy # type: ignore
|
|
10
11
|
|
|
11
12
|
from payi import Payi, AsyncPayi
|
|
12
13
|
from payi.types import IngestUnitsParams
|
|
14
|
+
from payi.types.ingest_units_params import Units
|
|
15
|
+
from payi.types.pay_i_common_models_api_router_header_info_param import PayICommonModelsAPIRouterHeaderInfoParam
|
|
13
16
|
|
|
14
17
|
from .Stopwatch import Stopwatch
|
|
15
18
|
from .Instruments import Instruments
|
|
16
19
|
|
|
17
20
|
|
|
21
|
+
class IsStreaming(Enum):
|
|
22
|
+
false = 0
|
|
23
|
+
true = 1
|
|
24
|
+
kwargs = 2
|
|
25
|
+
|
|
18
26
|
class PayiInstrumentor:
|
|
27
|
+
estimated_prompt_tokens: str = "estimated_prompt_tokens"
|
|
28
|
+
|
|
19
29
|
def __init__(
|
|
20
30
|
self,
|
|
21
31
|
payi: Union[Payi, AsyncPayi, None] = None,
|
|
@@ -41,12 +51,15 @@ class PayiInstrumentor:
|
|
|
41
51
|
def _instrument_all(self) -> None:
|
|
42
52
|
self._instrument_openai()
|
|
43
53
|
self._instrument_anthropic()
|
|
54
|
+
self._instrument_aws_bedrock()
|
|
44
55
|
|
|
45
56
|
def _instrument_specific(self, instruments: Set[Instruments]) -> None:
|
|
46
57
|
if Instruments.OPENAI in instruments:
|
|
47
58
|
self._instrument_openai()
|
|
48
59
|
if Instruments.ANTHROPIC in instruments:
|
|
49
60
|
self._instrument_anthropic()
|
|
61
|
+
if Instruments.AWS_BEDROCK in instruments:
|
|
62
|
+
self._instrument_aws_bedrock()
|
|
50
63
|
|
|
51
64
|
def _instrument_openai(self) -> None:
|
|
52
65
|
from .OpenAIInstrumentor import OpenAiInstrumentor
|
|
@@ -66,6 +79,15 @@ class PayiInstrumentor:
|
|
|
66
79
|
except Exception as e:
|
|
67
80
|
logging.error(f"Error instrumenting Anthropic: {e}")
|
|
68
81
|
|
|
82
|
+
def _instrument_aws_bedrock(self) -> None:
|
|
83
|
+
from .BedrockInstrumentor import BedrockInstrumentor
|
|
84
|
+
|
|
85
|
+
try:
|
|
86
|
+
BedrockInstrumentor.instrument(self)
|
|
87
|
+
|
|
88
|
+
except Exception as e:
|
|
89
|
+
logging.error(f"Error instrumenting AWS bedrock: {e}")
|
|
90
|
+
|
|
69
91
|
def _ingest_units(self, ingest_units: IngestUnitsParams) -> None:
|
|
70
92
|
# return early if there are no units to ingest and on a successul ingest request
|
|
71
93
|
if int(ingest_units.get("http_status_code") or 0) < 400:
|
|
@@ -203,8 +225,10 @@ class PayiInstrumentor:
|
|
|
203
225
|
def chat_wrapper(
|
|
204
226
|
self,
|
|
205
227
|
category: str,
|
|
206
|
-
process_chunk: Callable[[Any, IngestUnitsParams], None],
|
|
207
|
-
|
|
228
|
+
process_chunk: Optional[Callable[[Any, IngestUnitsParams], None]],
|
|
229
|
+
process_request: Optional[Callable[[IngestUnitsParams, Any], None]],
|
|
230
|
+
process_synchronous_response: Any,
|
|
231
|
+
is_streaming: IsStreaming,
|
|
208
232
|
wrapped: Any,
|
|
209
233
|
instance: Any,
|
|
210
234
|
args: Any,
|
|
@@ -212,8 +236,14 @@ class PayiInstrumentor:
|
|
|
212
236
|
) -> Any:
|
|
213
237
|
context = self.get_context()
|
|
214
238
|
|
|
239
|
+
is_bedrock:bool = category == "system.aws.bedrock"
|
|
240
|
+
|
|
215
241
|
if not context:
|
|
216
|
-
|
|
242
|
+
if is_bedrock:
|
|
243
|
+
# boto3 doesn't allow extra_headers
|
|
244
|
+
kwargs.pop("extra_headers", None)
|
|
245
|
+
|
|
246
|
+
# wrapped function invoked outside of decorator scope
|
|
217
247
|
return wrapped(*args, **kwargs)
|
|
218
248
|
|
|
219
249
|
# after _udpate_headers, all metadata to add to ingest is in extra_headers, keyed by the xproxy-xxx header name
|
|
@@ -226,20 +256,32 @@ class PayiInstrumentor:
|
|
|
226
256
|
|
|
227
257
|
return wrapped(*args, **kwargs)
|
|
228
258
|
|
|
229
|
-
ingest: IngestUnitsParams = {"category": category, "
|
|
259
|
+
ingest: IngestUnitsParams = {"category": category, "units": {}} # type: ignore
|
|
260
|
+
if is_bedrock:
|
|
261
|
+
# boto3 doesn't allow extra_headers
|
|
262
|
+
kwargs.pop("extra_headers", None)
|
|
263
|
+
ingest["resource"] = kwargs.get("modelId", "")
|
|
264
|
+
else:
|
|
265
|
+
ingest["resource"] = kwargs.get("model", "")
|
|
230
266
|
|
|
231
|
-
# blocked_limit = next((limit for limit in (context.get('limit_ids') or []) if limit in self._blocked_limits), None)
|
|
232
|
-
# if blocked_limit:
|
|
233
|
-
# raise Exception(f"Limit {blocked_limit} is blocked")
|
|
234
267
|
current_frame = inspect.currentframe()
|
|
235
268
|
# f_back excludes the current frame, strip() cleans up whitespace and newlines
|
|
236
269
|
stack = [frame.strip() for frame in traceback.format_stack(current_frame.f_back)] # type: ignore
|
|
237
270
|
|
|
238
|
-
|
|
239
|
-
|
|
271
|
+
ingest['properties'] = { 'system.stack_trace': json.dumps(stack) }
|
|
272
|
+
|
|
273
|
+
if process_request:
|
|
274
|
+
process_request(ingest, kwargs)
|
|
240
275
|
|
|
241
276
|
sw = Stopwatch()
|
|
242
|
-
stream =
|
|
277
|
+
stream: bool = False
|
|
278
|
+
|
|
279
|
+
if is_streaming == IsStreaming.kwargs:
|
|
280
|
+
stream = kwargs.get("stream", False)
|
|
281
|
+
elif is_streaming == IsStreaming.true:
|
|
282
|
+
stream = True
|
|
283
|
+
else:
|
|
284
|
+
stream = False
|
|
243
285
|
|
|
244
286
|
try:
|
|
245
287
|
limit_ids = extra_headers.pop("xProxy-Limit-IDs", None)
|
|
@@ -260,7 +302,7 @@ class PayiInstrumentor:
|
|
|
260
302
|
ingest["user_id"] = user_id
|
|
261
303
|
|
|
262
304
|
if len(extra_headers) > 0:
|
|
263
|
-
ingest["provider_request_headers"] =
|
|
305
|
+
ingest["provider_request_headers"] = [PayICommonModelsAPIRouterHeaderInfoParam(name=k, value=v) for k, v in extra_headers.items()]
|
|
264
306
|
|
|
265
307
|
provider_prompt = {}
|
|
266
308
|
for k, v in kwargs.items():
|
|
@@ -286,7 +328,7 @@ class PayiInstrumentor:
|
|
|
286
328
|
raise e
|
|
287
329
|
|
|
288
330
|
if stream:
|
|
289
|
-
|
|
331
|
+
stream_result = ChatStreamWrapper(
|
|
290
332
|
response=response,
|
|
291
333
|
instance=instance,
|
|
292
334
|
instrumentor=self,
|
|
@@ -294,15 +336,31 @@ class PayiInstrumentor:
|
|
|
294
336
|
ingest=ingest,
|
|
295
337
|
stopwatch=sw,
|
|
296
338
|
process_chunk=process_chunk,
|
|
339
|
+
is_bedrock=is_bedrock,
|
|
297
340
|
)
|
|
298
341
|
|
|
342
|
+
if is_bedrock:
|
|
343
|
+
if "body" in response:
|
|
344
|
+
response["body"] = stream_result
|
|
345
|
+
else:
|
|
346
|
+
response["stream"] = stream_result
|
|
347
|
+
return response
|
|
348
|
+
|
|
349
|
+
return stream_result
|
|
350
|
+
|
|
299
351
|
sw.stop()
|
|
300
352
|
duration = sw.elapsed_ms_int()
|
|
301
353
|
ingest["end_to_end_latency_ms"] = duration
|
|
302
354
|
ingest["http_status_code"] = 200
|
|
303
355
|
|
|
304
356
|
if process_synchronous_response:
|
|
305
|
-
process_synchronous_response(
|
|
357
|
+
return_result: Any = process_synchronous_response(
|
|
358
|
+
response=response,
|
|
359
|
+
ingest=ingest,
|
|
360
|
+
log_prompt_and_response=self._log_prompt_and_response,
|
|
361
|
+
instrumentor=self)
|
|
362
|
+
if return_result:
|
|
363
|
+
return return_result
|
|
306
364
|
|
|
307
365
|
self._ingest_units(ingest)
|
|
308
366
|
|
|
@@ -354,6 +412,17 @@ class PayiInstrumentor:
|
|
|
354
412
|
if experience_id is not None:
|
|
355
413
|
extra_headers["xProxy-Experience-ID"] = experience_id
|
|
356
414
|
|
|
415
|
+
@staticmethod
|
|
416
|
+
def update_for_vision(input: int, units: 'dict[str, Units]') -> int:
|
|
417
|
+
if PayiInstrumentor.estimated_prompt_tokens in units:
|
|
418
|
+
prompt_token_estimate: int = units.pop(PayiInstrumentor.estimated_prompt_tokens)["input"] # type: ignore
|
|
419
|
+
vision = input - prompt_token_estimate
|
|
420
|
+
if (vision > 0):
|
|
421
|
+
units["vision"] = Units(input=vision, output=0)
|
|
422
|
+
input = prompt_token_estimate
|
|
423
|
+
|
|
424
|
+
return input
|
|
425
|
+
|
|
357
426
|
@staticmethod
|
|
358
427
|
def payi_wrapper(func: Any) -> Any:
|
|
359
428
|
def _payi_wrapper(o: Any) -> Any:
|
|
@@ -370,7 +439,6 @@ class PayiInstrumentor:
|
|
|
370
439
|
|
|
371
440
|
return _payi_wrapper
|
|
372
441
|
|
|
373
|
-
|
|
374
442
|
class ChatStreamWrapper(ObjectProxy): # type: ignore
|
|
375
443
|
def __init__(
|
|
376
444
|
self,
|
|
@@ -381,7 +449,19 @@ class ChatStreamWrapper(ObjectProxy): # type: ignore
|
|
|
381
449
|
stopwatch: Stopwatch,
|
|
382
450
|
process_chunk: Optional[Callable[[Any, IngestUnitsParams], None]] = None,
|
|
383
451
|
log_prompt_and_response: bool = True,
|
|
452
|
+
is_bedrock: bool = False,
|
|
384
453
|
) -> None:
|
|
454
|
+
|
|
455
|
+
bedrock_from_stream: bool = False
|
|
456
|
+
if is_bedrock:
|
|
457
|
+
stream = response.get("stream", None)
|
|
458
|
+
if stream:
|
|
459
|
+
response = stream
|
|
460
|
+
bedrock_from_stream = True
|
|
461
|
+
else:
|
|
462
|
+
response = response.get("body")
|
|
463
|
+
bedrock_from_stream = False
|
|
464
|
+
|
|
385
465
|
super().__init__(response) # type: ignore
|
|
386
466
|
|
|
387
467
|
self._response = response
|
|
@@ -396,6 +476,8 @@ class ChatStreamWrapper(ObjectProxy): # type: ignore
|
|
|
396
476
|
self._process_chunk: Optional[Callable[[Any, IngestUnitsParams], None]] = process_chunk
|
|
397
477
|
|
|
398
478
|
self._first_token: bool = True
|
|
479
|
+
self._is_bedrock: bool = is_bedrock
|
|
480
|
+
self._bedrock_from_stream: bool = bedrock_from_stream
|
|
399
481
|
|
|
400
482
|
def __enter__(self) -> Any:
    """Enter the runtime context; the wrapper itself is the managed object."""
    return self
|
|
@@ -409,9 +491,26 @@ class ChatStreamWrapper(ObjectProxy): # type: ignore
|
|
|
409
491
|
async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
    """Exit the async context by delegating to the wrapped stream's __aexit__."""
    await self.__wrapped__.__aexit__(exc_type, exc_val, exc_tb)  # type: ignore
|
|
411
493
|
|
|
412
|
-
def __iter__(self) -> Any:
    """Return the iterator for this stream wrapper.

    Non-Bedrock streams are iterated through the wrapper itself; Bedrock
    streams are handed to a dedicated generator.
    """
    if not self._is_bedrock:
        return self
    # The Bedrock path must live in a separate function: a yield inside
    # __iter__ would turn this method itself into a generator and replace
    # the wrapper as the iterator.
    return self._iter_bedrock()
|
|
414
499
|
|
|
500
|
+
def _iter_bedrock(self) -> Any:
    """Yield events from the wrapped Bedrock response while ingesting them.

    Each event is fed through ``_evaluate_chunk`` before being passed on to
    the caller; once the stream is exhausted, ``_stop_iteration`` runs the
    final ingestion.  When ``_bedrock_from_stream`` is false, each event is
    expected to be a dict carrying a ``chunk`` with raw ``bytes`` that are
    decoded before evaluation.
    """
    # botocore's EventStream has no __next__, so iterate the wrapped object
    # directly rather than delegating through the proxy's iterator protocol.
    for event in self.__wrapped__:  # type: ignore
        if self._bedrock_from_stream:
            self._evaluate_chunk(event)
        else:
            chunk = event.get('chunk')  # type: ignore
            if chunk:
                self._evaluate_chunk(chunk.get('bytes').decode())  # type: ignore
        yield event

    self._stop_iteration()
|
|
513
|
+
|
|
415
514
|
def __aiter__(self) -> Any:
    """Return self as the asynchronous iterator for this stream."""
    return self
|
|
417
516
|
|
|
@@ -443,7 +542,7 @@ class ChatStreamWrapper(ObjectProxy): # type: ignore
|
|
|
443
542
|
self._first_token = False
|
|
444
543
|
|
|
445
544
|
if self._log_prompt_and_response:
|
|
446
|
-
self._responses.append(
|
|
545
|
+
self._responses.append(self.chunk_to_json(chunk))
|
|
447
546
|
|
|
448
547
|
if self._process_chunk:
|
|
449
548
|
self._process_chunk(chunk, self._ingest)
|
|
@@ -458,11 +557,21 @@ class ChatStreamWrapper(ObjectProxy): # type: ignore
|
|
|
458
557
|
|
|
459
558
|
self._instrumentor._ingest_units(self._ingest)
|
|
460
559
|
|
|
560
|
+
@staticmethod
|
|
561
|
+
def chunk_to_json(chunk: Any) -> str:
|
|
562
|
+
if hasattr(chunk, "to_json"):
|
|
563
|
+
return str(chunk.to_json())
|
|
564
|
+
elif isinstance(chunk, bytes):
|
|
565
|
+
return chunk.decode()
|
|
566
|
+
elif isinstance(chunk, str):
|
|
567
|
+
return chunk
|
|
568
|
+
else:
|
|
569
|
+
# assume dict
|
|
570
|
+
return json.dumps(chunk)
|
|
461
571
|
|
|
462
572
|
global _instrumentor
|
|
463
573
|
_instrumentor: PayiInstrumentor
|
|
464
574
|
|
|
465
|
-
|
|
466
575
|
def payi_instrument(
|
|
467
576
|
payi: Optional[Union[Payi, AsyncPayi]] = None,
|
|
468
577
|
instruments: Optional[Set[Instruments]] = None,
|
|
@@ -503,7 +612,6 @@ def ingest(
|
|
|
503
612
|
|
|
504
613
|
return _ingest
|
|
505
614
|
|
|
506
|
-
|
|
507
615
|
def proxy(
|
|
508
616
|
limit_ids: Optional["list[str]"] = None,
|
|
509
617
|
request_tags: Optional["list[str]"] = None,
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: payi
|
|
3
|
-
Version: 0.1.
|
|
3
|
+
Version: 0.1.0a41
|
|
4
4
|
Summary: The official Python library for the payi API
|
|
5
5
|
Project-URL: Homepage, https://github.com/Pay-i/pay-i-python
|
|
6
6
|
Project-URL: Repository, https://github.com/Pay-i/pay-i-python
|
|
@@ -27,6 +27,7 @@ Requires-Dist: distro<2,>=1.7.0
|
|
|
27
27
|
Requires-Dist: httpx<1,>=0.23.0
|
|
28
28
|
Requires-Dist: pydantic<3,>=1.9.0
|
|
29
29
|
Requires-Dist: sniffio
|
|
30
|
+
Requires-Dist: tiktoken>=0.8.0
|
|
30
31
|
Requires-Dist: typing-extensions<5,>=4.10
|
|
31
32
|
Requires-Dist: wrapt>=1.17.2
|
|
32
33
|
Description-Content-Type: text/markdown
|
|
@@ -2,16 +2,16 @@ payi/__init__.py,sha256=_eeZx9fx2Wp81adXh7qrpkmXCso7TiRSvIlLkQ0sQhY,2399
|
|
|
2
2
|
payi/_base_client.py,sha256=CiHJoJuzSweHB7oidXWIlWdU0vrLCpPc9hMlj_S-anE,68128
|
|
3
3
|
payi/_client.py,sha256=aUtMEmV02nTs3_pYYAR-OchCkofUHeXhhRs43tyDHLE,18760
|
|
4
4
|
payi/_compat.py,sha256=VWemUKbj6DDkQ-O4baSpHVLJafotzeXmCQGJugfVTIw,6580
|
|
5
|
-
payi/_constants.py,sha256=
|
|
5
|
+
payi/_constants.py,sha256=S14PFzyN9-I31wiV7SmIlL5Ga0MLHxdvegInGdXH7tM,462
|
|
6
6
|
payi/_exceptions.py,sha256=ItygKNrNXIVY0H6LsGVZvFuAHB3Vtm_VZXmWzCnpHy0,3216
|
|
7
7
|
payi/_files.py,sha256=mf4dOgL4b0ryyZlbqLhggD3GVgDf6XxdGFAgce01ugE,3549
|
|
8
|
-
payi/_models.py,sha256=
|
|
8
|
+
payi/_models.py,sha256=uZvPAaaeDwCYwvB-yq7nxZnZ70I2Na_KjSAqaPQWfh0,28659
|
|
9
9
|
payi/_qs.py,sha256=AOkSz4rHtK4YI3ZU_kzea-zpwBUgEY8WniGmTPyEimc,4846
|
|
10
10
|
payi/_resource.py,sha256=j2jIkTr8OIC8sU6-05nxSaCyj4MaFlbZrwlyg4_xJos,1088
|
|
11
11
|
payi/_response.py,sha256=CfrNS_3wbL8o9dRyRVfZQ5E1GUlA4CUIUEK8olmfGqE,28777
|
|
12
12
|
payi/_streaming.py,sha256=Z_wIyo206T6Jqh2rolFg2VXZgX24PahLmpURp0-NssU,10092
|
|
13
13
|
payi/_types.py,sha256=2mbMK86K3W1aMTW7sOGQ-VND6-A2IuXKm8p4sYFztBU,6141
|
|
14
|
-
payi/_version.py,sha256=
|
|
14
|
+
payi/_version.py,sha256=4Z7oXmyN8CqDXoYFdr9Fz53f3-pCQaVF2JTZFXE0NCo,165
|
|
15
15
|
payi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
16
16
|
payi/_utils/__init__.py,sha256=PNZ_QJuzZEgyYXqkO1HVhGkj5IU9bglVUcw7H-Knjzw,2062
|
|
17
17
|
payi/_utils/_logs.py,sha256=fmnf5D9TOgkgZKfgYmSa3PiUc3SZgkchn6CzJUeo0SQ,768
|
|
@@ -23,12 +23,13 @@ payi/_utils/_transform.py,sha256=Dkkyr7OveGmOolepcvXmVJWE3kqim4b0nM0h7yWbgeY,134
|
|
|
23
23
|
payi/_utils/_typing.py,sha256=nTJz0jcrQbEgxwy4TtAkNxuU0QHHlmc6mQtA6vIR8tg,4501
|
|
24
24
|
payi/_utils/_utils.py,sha256=8UmbPOy_AAr2uUjjFui-VZSrVBHRj6bfNEKRp5YZP2A,12004
|
|
25
25
|
payi/lib/.keep,sha256=wuNrz-5SXo3jJaJOJgz4vFHM41YH_g20F5cRQo0vLes,224
|
|
26
|
-
payi/lib/AnthropicInstrumentor.py,sha256=
|
|
27
|
-
payi/lib/
|
|
28
|
-
payi/lib/
|
|
26
|
+
payi/lib/AnthropicInstrumentor.py,sha256=uiobG6N6QOKyhyS_kSZPCrKCnsRnKSLPBPNn3sfcUYg,4771
|
|
27
|
+
payi/lib/BedrockInstrumentor.py,sha256=r5h01WJqx9PGszpwKWwVgPdCqUlWIbf6IiS01drN0qc,10684
|
|
28
|
+
payi/lib/Instruments.py,sha256=bapmVS9jbHtFknXCKDzsFFWvf5XLtzEpdlvI7iEWY-o,147
|
|
29
|
+
payi/lib/OpenAIInstrumentor.py,sha256=qYZJ-m723GCK3nGmI-JmRrNMpcDWmr4t4KYR1zWP-Ug,4195
|
|
29
30
|
payi/lib/Stopwatch.py,sha256=vFyGVRvkppamP7W0IuZyypKLMIaqjhB7fcRG0dNyfnQ,757
|
|
30
31
|
payi/lib/helpers.py,sha256=ZgkY8UE2YRc7ok2Pmxg_T9UMqKI8D8542JY3CP8RZCM,1597
|
|
31
|
-
payi/lib/instrument.py,sha256=
|
|
32
|
+
payi/lib/instrument.py,sha256=FH2bcmcQyFCd6GAc7tiikag9RwPfd5FR5QCCNMomt4E,23462
|
|
32
33
|
payi/resources/__init__.py,sha256=isHGXSl9kOrZDduKrX3UenTwrdTpuKJVBjw6NYSBV20,3592
|
|
33
34
|
payi/resources/billing_models.py,sha256=5w3RfGXtGlyq5vbTw6hQrx1UlzRBtlq8ArcFlf5e3TY,20152
|
|
34
35
|
payi/resources/ingest.py,sha256=SvQspsYled4_ErOZKzVtazBIk0tUC1e34Lw8qw4SNEM,15484
|
|
@@ -109,7 +110,7 @@ payi/types/requests/request_result.py,sha256=phYQiqhwNaR9igP-Fhs34Y-__dlT7L4wq-r
|
|
|
109
110
|
payi/types/shared/__init__.py,sha256=-xz5dxK5LBjLnsi2LpLq5btaGDFp-mSjJ0y2qKy0Yus,264
|
|
110
111
|
payi/types/shared/evaluation_response.py,sha256=ejEToMA57PUu1SldEtJ5z9r4fAO3U0tvdjbsyIoVX1s,214
|
|
111
112
|
payi/types/shared/pay_i_common_models_budget_management_cost_details_base.py,sha256=XmIzJXy4zAi-mfrDvEXiYjO3qF1EvugGUl-Gijj4TA4,268
|
|
112
|
-
payi-0.1.
|
|
113
|
-
payi-0.1.
|
|
114
|
-
payi-0.1.
|
|
115
|
-
payi-0.1.
|
|
113
|
+
payi-0.1.0a41.dist-info/METADATA,sha256=QTeHU7hACFAIb9eEk3z9jPIbSjQwiRcuCrNtzIkr3FU,12625
|
|
114
|
+
payi-0.1.0a41.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
115
|
+
payi-0.1.0a41.dist-info/licenses/LICENSE,sha256=CQt03aM-P4a3Yg5qBg3JSLVoQS3smMyvx7tYg_6V7Gk,11334
|
|
116
|
+
payi-0.1.0a41.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|