payi-0.1.0a41-py3-none-any.whl → payi-0.1.0a42-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of payi might be problematic. Click here for more details.

payi/_version.py CHANGED
@@ -1,4 +1,4 @@
1
1
  # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2
2
 
3
3
  __title__ = "payi"
4
- __version__ = "0.1.0-alpha.41" # x-release-please-version
4
+ __version__ = "0.1.0-alpha.42" # x-release-please-version
@@ -16,12 +16,6 @@ class AnthropicIntrumentor:
16
16
  try:
17
17
  import anthropic # type: ignore # noqa: F401 I001
18
18
 
19
- # wrap_function_wrapper(
20
- # "anthropic.resources.completions",
21
- # "Completions.create",
22
- # chat_wrapper(instrumentor),
23
- # )
24
-
25
19
  wrap_function_wrapper(
26
20
  "anthropic.resources.messages",
27
21
  "Messages.create",
@@ -34,6 +28,18 @@ class AnthropicIntrumentor:
34
28
  chat_wrapper(instrumentor),
35
29
  )
36
30
 
31
+ wrap_function_wrapper(
32
+ "anthropic.resources.messages",
33
+ "AsyncMessages.create",
34
+ achat_wrapper(instrumentor),
35
+ )
36
+
37
+ wrap_function_wrapper(
38
+ "anthropic.resources.messages",
39
+ "AsyncMessages.stream",
40
+ achat_wrapper(instrumentor),
41
+ )
42
+
37
43
  except Exception as e:
38
44
  logging.debug(f"Error instrumenting anthropic: {e}")
39
45
  return
@@ -44,19 +50,39 @@ def chat_wrapper(
44
50
  instrumentor: PayiInstrumentor,
45
51
  wrapped: Any,
46
52
  instance: Any,
47
- args: Any,
48
- kwargs: Any,
53
+ *args: Any,
54
+ **kwargs: Any,
49
55
  ) -> Any:
50
56
  return instrumentor.chat_wrapper(
51
- category="system.anthropic",
52
- process_chunk=process_chunk,
53
- process_request=process_request,
54
- process_synchronous_response=process_synchronous_response,
55
- is_streaming=IsStreaming.kwargs,
56
- wrapped=wrapped,
57
- instance=instance,
58
- args=args,
59
- kwargs=kwargs,
57
+ "system.anthropic",
58
+ process_chunk,
59
+ process_request,
60
+ process_synchronous_response,
61
+ IsStreaming.kwargs,
62
+ wrapped,
63
+ instance,
64
+ args,
65
+ kwargs,
66
+ )
67
+
68
+ @PayiInstrumentor.payi_awrapper
69
+ async def achat_wrapper(
70
+ instrumentor: PayiInstrumentor,
71
+ wrapped: Any,
72
+ instance: Any,
73
+ *args: Any,
74
+ **kwargs: Any,
75
+ ) -> Any:
76
+ return await instrumentor.achat_wrapper(
77
+ "system.anthropic",
78
+ process_chunk,
79
+ process_request,
80
+ process_synchronous_response,
81
+ IsStreaming.kwargs,
82
+ wrapped,
83
+ instance,
84
+ args,
85
+ kwargs,
60
86
  )
61
87
 
62
88
 
@@ -116,7 +142,7 @@ def has_image_and_get_texts(encoding: tiktoken.Encoding, content: Union[str, 'li
116
142
  token_count = sum(len(encoding.encode(item.get("text", ""))) for item in content if item.get("type") == "text")
117
143
  return has_image, token_count
118
144
 
119
- def process_request(ingest: IngestUnitsParams, kwargs: Any) -> None:
145
+ def process_request(ingest: IngestUnitsParams, *args: Any, **kwargs: Any) -> None: # noqa: ARG001
120
146
  messages = kwargs.get("messages")
121
147
  if not messages or len(messages) == 0:
122
148
  return
@@ -17,12 +17,6 @@ class BedrockInstrumentor:
17
17
  try:
18
18
  import boto3 # type: ignore # noqa: F401 I001
19
19
 
20
- # wrap_function_wrapper(
21
- # "anthropic.resources.completions",
22
- # "Completions.create",
23
- # chat_wrapper(instrumentor),
24
- # )
25
-
26
20
  wrap_function_wrapper(
27
21
  "botocore.client",
28
22
  "ClientCreator.create_client",
@@ -40,7 +34,7 @@ class BedrockInstrumentor:
40
34
  return
41
35
 
42
36
  @PayiInstrumentor.payi_wrapper
43
- def create_client_wrapper(instrumentor: PayiInstrumentor, wrapped: Any, instance: Any, args: Any, kwargs: Any) -> Any: # noqa: ARG001
37
+ def create_client_wrapper(instrumentor: PayiInstrumentor, wrapped: Any, instance: Any, *args: Any, **kwargs: Any) -> Any: # noqa: ARG001
44
38
  if kwargs.get("service_name") != "bedrock-runtime":
45
39
  return wrapped(*args, **kwargs)
46
40
 
@@ -108,15 +102,15 @@ def wrap_invoke(instrumentor: PayiInstrumentor, wrapped: Any) -> Any:
108
102
 
109
103
  if modelId.startswith("meta.llama3") or modelId.startswith("anthropic."):
110
104
  return instrumentor.chat_wrapper(
111
- category="system.aws.bedrock",
112
- process_chunk=None,
113
- process_request=process_invoke_request,
114
- process_synchronous_response=process_synchronous_invoke_response,
115
- is_streaming=IsStreaming.false,
116
- wrapped=wrapped,
117
- instance=None,
118
- args=args,
119
- kwargs=kwargs,
105
+ "system.aws.bedrock",
106
+ None,
107
+ process_invoke_request,
108
+ process_synchronous_invoke_response,
109
+ IsStreaming.false,
110
+ wrapped,
111
+ None,
112
+ args,
113
+ kwargs,
120
114
  )
121
115
  return wrapped(*args, **kwargs)
122
116
 
@@ -129,15 +123,15 @@ def wrap_invoke_stream(instrumentor: PayiInstrumentor, wrapped: Any) -> Any:
129
123
 
130
124
  if modelId.startswith("meta.llama3") or modelId.startswith("anthropic."):
131
125
  return instrumentor.chat_wrapper(
132
- category="system.aws.bedrock",
133
- process_chunk=process_invoke_streaming_anthropic_chunk if modelId.startswith("anthropic.") else process_invoke_streaming_llama_chunk,
134
- process_request=process_invoke_request,
135
- process_synchronous_response=None,
136
- is_streaming=IsStreaming.true,
137
- wrapped=wrapped,
138
- instance=None,
139
- args=args,
140
- kwargs=kwargs,
126
+ "system.aws.bedrock",
127
+ process_invoke_streaming_anthropic_chunk if modelId.startswith("anthropic.") else process_invoke_streaming_llama_chunk,
128
+ process_invoke_request,
129
+ None,
130
+ IsStreaming.true,
131
+ wrapped,
132
+ None,
133
+ args,
134
+ kwargs,
141
135
  )
142
136
  return wrapped(*args, **kwargs)
143
137
 
@@ -150,15 +144,15 @@ def wrap_converse(instrumentor: PayiInstrumentor, wrapped: Any) -> Any:
150
144
 
151
145
  if modelId.startswith("meta.llama3") or modelId.startswith("anthropic."):
152
146
  return instrumentor.chat_wrapper(
153
- category="system.aws.bedrock",
154
- process_chunk=None,
155
- process_request=process_converse_request,
156
- process_synchronous_response=process_synchronous_converse_response,
157
- is_streaming=IsStreaming.false,
158
- wrapped=wrapped,
159
- instance=None,
160
- args=args,
161
- kwargs=kwargs,
147
+ "system.aws.bedrock",
148
+ None,
149
+ process_converse_request,
150
+ process_synchronous_converse_response,
151
+ IsStreaming.false,
152
+ wrapped,
153
+ None,
154
+ args,
155
+ kwargs,
162
156
  )
163
157
  return wrapped(*args, **kwargs)
164
158
 
@@ -171,15 +165,15 @@ def wrap_converse_stream(instrumentor: PayiInstrumentor, wrapped: Any) -> Any:
171
165
 
172
166
  if modelId.startswith("meta.llama3") or modelId.startswith("anthropic."):
173
167
  return instrumentor.chat_wrapper(
174
- category="system.aws.bedrock",
175
- process_chunk=process_converse_streaming_chunk,
176
- process_request=process_converse_request,
177
- process_synchronous_response=None,
178
- is_streaming=IsStreaming.true,
179
- wrapped=wrapped,
180
- instance=None,
181
- args=args,
182
- kwargs=kwargs,
168
+ "system.aws.bedrock",
169
+ process_converse_streaming_chunk,
170
+ process_converse_request,
171
+ None,
172
+ IsStreaming.true,
173
+ wrapped,
174
+ None,
175
+ args,
176
+ kwargs,
183
177
  )
184
178
  return wrapped(*args, **kwargs)
185
179
 
@@ -242,7 +236,7 @@ def process_synchronous_invoke_response(
242
236
 
243
237
  return response
244
238
 
245
- def process_invoke_request(ingest: IngestUnitsParams, kwargs: Any) -> None: # noqa: ARG001
239
+ def process_invoke_request(ingest: IngestUnitsParams, *args: Any, **kwargs: Any) -> None: # noqa: ARG001
246
240
  return
247
241
 
248
242
  def process_converse_streaming_chunk(chunk: 'dict[str, Any]', ingest: IngestUnitsParams) -> None:
@@ -284,5 +278,5 @@ def process_synchronous_converse_response(
284
278
 
285
279
  return None
286
280
 
287
- def process_converse_request(ingest: IngestUnitsParams, kwargs: Any) -> None: # noqa: ARG001
281
+ def process_converse_request(ingest: IngestUnitsParams, *args: Any, **kwargs: Any) -> None: # noqa: ARG001
288
282
  return
@@ -3,7 +3,7 @@ import logging
3
3
  from typing import Any, Union
4
4
  from importlib.metadata import version
5
5
 
6
- import tiktoken
6
+ import tiktoken # type: ignore
7
7
  from wrapt import wrap_function_wrapper # type: ignore
8
8
 
9
9
  from payi.types import IngestUnitsParams
@@ -23,6 +23,13 @@ class OpenAiInstrumentor:
23
23
  "Completions.create",
24
24
  chat_wrapper(instrumentor),
25
25
  )
26
+
27
+ wrap_function_wrapper(
28
+ "openai.resources.chat.completions",
29
+ "AsyncCompletions.create",
30
+ achat_wrapper(instrumentor),
31
+ )
32
+
26
33
  except Exception as e:
27
34
  logging.debug(f"Error instrumenting openai: {e}")
28
35
  return
@@ -33,19 +40,39 @@ def chat_wrapper(
33
40
  instrumentor: PayiInstrumentor,
34
41
  wrapped: Any,
35
42
  instance: Any,
36
- args: Any,
37
- kwargs: Any,
43
+ *args: Any,
44
+ **kwargs: Any,
38
45
  ) -> Any:
39
46
  return instrumentor.chat_wrapper(
40
- category="system.openai",
41
- process_chunk=process_chat_chunk,
42
- process_request=process_request,
43
- process_synchronous_response=process_chat_synchronous_response,
44
- is_streaming=IsStreaming.kwargs,
45
- wrapped=wrapped,
46
- instance=instance,
47
- args=args,
48
- kwargs=kwargs,
47
+ "system.openai",
48
+ process_chat_chunk,
49
+ process_request,
50
+ process_chat_synchronous_response,
51
+ IsStreaming.kwargs,
52
+ wrapped,
53
+ instance,
54
+ args,
55
+ kwargs,
56
+ )
57
+
58
+ @PayiInstrumentor.payi_awrapper
59
+ async def achat_wrapper(
60
+ instrumentor: PayiInstrumentor,
61
+ wrapped: Any,
62
+ instance: Any,
63
+ *args: Any,
64
+ **kwargs: Any,
65
+ ) -> Any:
66
+ return await instrumentor.achat_wrapper(
67
+ "system.openai",
68
+ process_chat_chunk,
69
+ process_request,
70
+ process_chat_synchronous_response,
71
+ IsStreaming.kwargs,
72
+ wrapped,
73
+ instance,
74
+ args,
75
+ kwargs,
49
76
  )
50
77
 
51
78
 
@@ -103,7 +130,7 @@ def has_image_and_get_texts(encoding: tiktoken.Encoding, content: Union[str, 'li
103
130
  token_count = sum(len(encoding.encode(item.get("text", ""))) for item in content if item.get("type") == "text")
104
131
  return has_image, token_count
105
132
 
106
- def process_request(ingest: IngestUnitsParams, kwargs: Any) -> None:
133
+ def process_request(ingest: IngestUnitsParams, *args: Any, **kwargs: Any) -> None: # noqa: ARG001
107
134
  messages = kwargs.get("messages")
108
135
  if not messages or len(messages) == 0:
109
136
  return
@@ -112,9 +139,9 @@ def process_request(ingest: IngestUnitsParams, kwargs: Any) -> None:
112
139
  has_image = False
113
140
 
114
141
  try:
115
- enc = tiktoken.encoding_for_model(kwargs.get("model"))
142
+ enc = tiktoken.encoding_for_model(kwargs.get("model")) # type: ignore
116
143
  except KeyError:
117
- enc = tiktoken.get_encoding("o200k_base")
144
+ enc = tiktoken.get_encoding("o200k_base") # type: ignore
118
145
 
119
146
  for message in messages:
120
147
  msg_has_image, msg_prompt_tokens = has_image_and_get_texts(enc, message.get('content', ''))
payi/lib/Stopwatch.py CHANGED
@@ -15,7 +15,7 @@ class Stopwatch:
15
15
 
16
16
  def elapsed_s(self) -> float:
17
17
  if self.start_time is None:
18
- raise ValueError("Stopwatch has not been started")
18
+ return 0.0 # ValueError("Stopwatch has not been started")
19
19
  if self.end_time is None:
20
20
  return time.perf_counter() - self.start_time
21
21
  return self.end_time - self.start_time