prediction-market-agent-tooling 0.69.8__py3-none-any.whl → 0.69.9__py3-none-any.whl
This diff compares the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- prediction_market_agent_tooling/tools/caches/db_cache.py +215 -113
- {prediction_market_agent_tooling-0.69.8.dist-info → prediction_market_agent_tooling-0.69.9.dist-info}/METADATA +1 -1
- {prediction_market_agent_tooling-0.69.8.dist-info → prediction_market_agent_tooling-0.69.9.dist-info}/RECORD +6 -6
- {prediction_market_agent_tooling-0.69.8.dist-info → prediction_market_agent_tooling-0.69.9.dist-info}/WHEEL +0 -0
- {prediction_market_agent_tooling-0.69.8.dist-info → prediction_market_agent_tooling-0.69.9.dist-info}/entry_points.txt +0 -0
- {prediction_market_agent_tooling-0.69.8.dist-info → prediction_market_agent_tooling-0.69.9.dist-info}/licenses/LICENSE +0 -0
prediction_market_agent_tooling/tools/caches/db_cache.py

@@ -1,6 +1,8 @@
+import asyncio
 import hashlib
 import inspect
 import json
+from dataclasses import dataclass
 from datetime import timedelta
 from functools import wraps
 from types import UnionType
@@ -12,6 +14,7 @@ from typing import (
     cast,
     get_args,
     get_origin,
+    get_type_hints,
     overload,
 )

@@ -101,136 +104,235 @@ def db_cache(

     api_keys = api_keys if api_keys is not None else APIKeys()

-
-
-        # If caching is disabled, just call the function and return it
-        if not api_keys.ENABLE_CACHE:
-            return func(*args, **kwargs)
+    # Check if the decorated function is async
+    if inspect.iscoroutinefunction(func):

-
-
-
+        @wraps(func)
+        async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
+            # If caching is disabled, just call the function and return it
+            if not api_keys.ENABLE_CACHE:
+                return await func(*args, **kwargs)

-
-        signature = inspect.signature(func)
-        bound_arguments = signature.bind(*args, **kwargs)
-        bound_arguments.apply_defaults()
-
-        # Convert any argument that is Pydantic model into classic dictionary, otherwise it won't be json-serializable.
-        args_dict: dict[str, Any] = bound_arguments.arguments
-
-        # Remove `self` or `cls` if present (in case of class' methods)
-        if "self" in args_dict:
-            del args_dict["self"]
-        if "cls" in args_dict:
-            del args_dict["cls"]
-
-        # Remove ignored arguments
-        if ignore_args:
-            for arg in ignore_args:
-                if arg in args_dict:
-                    del args_dict[arg]
-
-        # Remove arguments of ignored types
-        if ignore_arg_types:
-            args_dict = {
-                k: v
-                for k, v in args_dict.items()
-                if not isinstance(v, tuple(ignore_arg_types))
-            }
+            # Run blocking database operations in thread pool

-
-
-        args_hash = hashlib.md5(arg_string.encode()).hexdigest()
+            # Ensure tables in thread pool
+            await asyncio.to_thread(_ensure_tables, api_keys)

-
-        full_function_name = func.__module__ + "." + func.__qualname__
-        # But also get the standard function name to easily search for it in database
-        function_name = func.__name__
+            ctx = _build_context(func, args, kwargs, ignore_args, ignore_arg_types)

-
-
-        is_pydantic_model = return_type is not None and contains_pydantic_model(
-            return_type
-        )
+            # Fetch cached result in thread pool
+            lookup = await asyncio.to_thread(_fetch_cached, api_keys, ctx, max_age)

-
-
-
-        # Try to get cached result
-        statement = (
-            select(FunctionCache)
-            .where(
-                FunctionCache.function_name == function_name,
-                FunctionCache.full_function_name == full_function_name,
-                FunctionCache.args_hash == args_hash,
+            if lookup.hit:
+                logger.debug(
+                    f"{DB_CACHE_LOG_PREFIX} [cache-hit] Cache hit for {ctx.full_function_name}"
                )
-            .
-
-
-
-
-        cached_result = session.exec(statement).first()
-
-        if cached_result:
-            logger.info(
-                # Keep the special [case-hit] identifier so we can easily track it in GCP.
-                f"{DB_CACHE_LOG_PREFIX} [cache-hit] Cache hit for {full_function_name} with args {args_dict} and output {cached_result.result}"
+                return lookup.value
+
+            computed_result = await func(*args, **kwargs)
+            logger.debug(
+                f"{DB_CACHE_LOG_PREFIX} [cache-miss] Cache miss for {ctx.full_function_name}"
            )
-
-
-
-
-
-
-
-
-
+
+            if cache_none or computed_result is not None:
+                # Save cached result in thread pool (fire-and-forget)
+                asyncio.create_task(
+                    asyncio.to_thread(
+                        _save_cached,
+                        api_keys,
+                        ctx,
+                        computed_result,
+                        log_error_on_unsavable_data,
                    )
-
-
-
+                )
+
+            return computed_result
+
+        return cast(FunctionT, async_wrapper)
+
+    @wraps(func)
+    def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
+        if not api_keys.ENABLE_CACHE:
+            return func(*args, **kwargs)
+
+        _ensure_tables(api_keys)
+
+        ctx = _build_context(func, args, kwargs, ignore_args, ignore_arg_types)
+        lookup = _fetch_cached(api_keys, ctx, max_age)
+
+        if lookup.hit:
+            logger.debug(
+                f"{DB_CACHE_LOG_PREFIX} [cache-hit] Cache hit for {ctx.full_function_name}"
+            )
+            return lookup.value

-        # On cache miss, compute the result
         computed_result = func(*args, **kwargs)
-
-
-            f"{DB_CACHE_LOG_PREFIX} [cache-miss] Cache miss for {full_function_name} with args {args_dict}, computed the output {computed_result}"
+        logger.debug(
+            f"{DB_CACHE_LOG_PREFIX} [cache-miss] Cache miss for {ctx.full_function_name}"
        )

-        # If postgres access was specified, save it.
         if cache_none or computed_result is not None:
-
-                function_name=function_name,
-                full_function_name=full_function_name,
-                args_hash=args_hash,
-                args=args_dict,
-                result=computed_result,
-                created_at=utcnow(),
-            )
-            # Do not raise an exception if saving to the database fails, just log it and let the agent continue the work.
-            try:
-                with DBManager(
-                    api_keys.sqlalchemy_db_url.get_secret_value()
-                ).get_session() as session:
-                    logger.info(
-                        f"{DB_CACHE_LOG_PREFIX} [cache-info] Saving {cache_entry} into database."
-                    )
-                    session.add(cache_entry)
-                    session.commit()
-            except (DataError, psycopg2.errors.UntranslatableCharacter) as e:
-                (logger.error if log_error_on_unsavable_data else logger.warning)(
-                    f"{DB_CACHE_LOG_PREFIX} [cache-error] Failed to save {cache_entry} into database, ignoring, because: {e}"
-                )
-            except Exception:
-                logger.exception(
-                    f"{DB_CACHE_LOG_PREFIX} [cache-error] Failed to save {cache_entry} into database, ignoring."
-                )
+            _save_cached(api_keys, ctx, computed_result, log_error_on_unsavable_data)

         return computed_result

-    return cast(FunctionT,
+    return cast(FunctionT, sync_wrapper)
+
+
+@dataclass
+class CallContext:
+    args_dict: dict[str, Any]
+    args_hash: str
+    function_name: str
+    full_function_name: str
+    return_type: Any
+
+    @property
+    def is_pydantic_model(self) -> bool:
+        return self.return_type is not None and contains_pydantic_model(
+            self.return_type
+        )
+
+
+@dataclass
+class CacheLookup:
+    hit: bool
+    value: Any | None = None
+
+
+def _ensure_tables(api_keys: APIKeys) -> None:
+    DBManager(api_keys.sqlalchemy_db_url.get_secret_value()).create_tables(
+        [FunctionCache]
+    )
+
+
+def _build_context(
+    func: Callable[..., Any],
+    args: tuple[Any, ...],
+    kwargs: dict[str, Any],
+    ignore_args: Sequence[str] | None,
+    ignore_arg_types: Sequence[type] | None,
+) -> CallContext:
+    signature = inspect.signature(func)
+    bound_arguments = signature.bind(*args, **kwargs)
+    bound_arguments.apply_defaults()
+
+    args_dict: dict[str, Any] = bound_arguments.arguments
+
+    if "self" in args_dict:
+        del args_dict["self"]
+    if "cls" in args_dict:
+        del args_dict["cls"]
+
+    if ignore_args:
+        for arg in ignore_args:
+            if arg in args_dict:
+                del args_dict[arg]
+
+    if ignore_arg_types:
+        args_dict = {
+            k: v
+            for k, v in args_dict.items()
+            if not isinstance(v, tuple(ignore_arg_types))
+        }
+
+    arg_string = json.dumps(args_dict, sort_keys=True, default=str)
+    args_hash = hashlib.md5(arg_string.encode()).hexdigest()
+
+    full_function_name = func.__module__ + "." + func.__qualname__
+    function_name = func.__name__
+
+    # Use get_type_hints to resolve forward references instead of __annotations__
+    try:
+        type_hints = get_type_hints(func)
+        return_type = type_hints.get("return", None)
+    except (NameError, AttributeError, TypeError) as e:
+        # Fallback to raw annotations if get_type_hints fails
+        logger.debug(
+            f"{DB_CACHE_LOG_PREFIX} Failed to resolve type hints for {full_function_name}, falling back to raw annotations: {e}"
+        )
+        return_type = func.__annotations__.get("return", None)
+
+    return CallContext(
+        args_dict=args_dict,
+        args_hash=args_hash,
+        function_name=function_name,
+        full_function_name=full_function_name,
+        return_type=return_type,
+    )
+
+
+def _fetch_cached(
+    api_keys: APIKeys,
+    ctx: CallContext,
+    max_age: timedelta | None,
+) -> CacheLookup:
+    with DBManager(
+        api_keys.sqlalchemy_db_url.get_secret_value()
+    ).get_session() as session:
+        statement = (
+            select(FunctionCache)
+            .where(
+                FunctionCache.function_name == ctx.function_name,
+                FunctionCache.full_function_name == ctx.full_function_name,
+                FunctionCache.args_hash == ctx.args_hash,
+            )
+            .order_by(desc(FunctionCache.created_at))
+        )
+        if max_age is not None:
+            cutoff_time = utcnow() - max_age
+            statement = statement.where(FunctionCache.created_at >= cutoff_time)
+        cached_result = session.exec(statement).first()
+
+    if not cached_result:
+        return CacheLookup(hit=False)
+
+    if ctx.is_pydantic_model:
+        try:
+            value = convert_cached_output_to_pydantic(
+                ctx.return_type, cached_result.result
+            )
+            return CacheLookup(hit=True, value=value)
+        except (ValueError, TypeError) as e:
+            logger.warning(
+                f"{DB_CACHE_LOG_PREFIX} [cache-miss] Failed to validate cached result for {ctx.full_function_name}, treating as cache miss: {e}"
+            )
+            return CacheLookup(hit=False)
+
+    return CacheLookup(hit=True, value=cached_result.result)
+
+
+def _save_cached(
+    api_keys: APIKeys,
+    ctx: CallContext,
+    computed_result: Any,
+    log_error_on_unsavable_data: bool,
+) -> None:
+    cache_entry = FunctionCache(
+        function_name=ctx.function_name,
+        full_function_name=ctx.full_function_name,
+        args_hash=ctx.args_hash,
+        args=ctx.args_dict,
+        result=computed_result,
+        created_at=utcnow(),
+    )
+    try:
+        with DBManager(
+            api_keys.sqlalchemy_db_url.get_secret_value()
+        ).get_session() as session:
+            logger.debug(
+                f"{DB_CACHE_LOG_PREFIX} [cache-save] Saving cache entry for {ctx.full_function_name}"
+            )
+            session.add(cache_entry)
+            session.commit()
+    except (DataError, psycopg2.errors.UntranslatableCharacter) as e:
+        (logger.error if log_error_on_unsavable_data else logger.warning)(
+            f"{DB_CACHE_LOG_PREFIX} [cache-error] Failed to save cache entry for {ctx.full_function_name}: {e}"
+        )
+    except Exception:
+        logger.exception(
+            f"{DB_CACHE_LOG_PREFIX} [cache-error] Failed to save cache entry for {ctx.full_function_name}"
+        )


 def contains_pydantic_model(return_type: Any) -> bool:
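Context for the change above: the rewritten decorator keeps an asyncio event loop responsive by pushing the blocking Postgres work onto threads; reads go through await asyncio.to_thread(...), and the write-back is scheduled as a fire-and-forget asyncio.create_task(asyncio.to_thread(...)). Below is a minimal, self-contained sketch of that pattern, not the package's actual implementation; toy_db_cache, _blocking_lookup, _blocking_save, and fetch_probability are hypothetical stand-ins for db_cache, _fetch_cached, _save_cached, and a decorated agent function.

import asyncio
import functools
import inspect
import time
from typing import Any, Callable

# Toy in-memory store standing in for the Postgres-backed FunctionCache table.
_STORE: dict[str, Any] = {}


def _blocking_lookup(key: str) -> Any | None:
    time.sleep(0.05)  # simulate a blocking DB round-trip
    return _STORE.get(key)


def _blocking_save(key: str, value: Any) -> None:
    time.sleep(0.05)  # simulate a blocking DB write
    _STORE[key] = value


def toy_db_cache(func: Callable[..., Any]) -> Callable[..., Any]:
    # Mirror the diff: pick an async or a sync wrapper depending on the decorated function.
    if inspect.iscoroutinefunction(func):

        @functools.wraps(func)
        async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
            key = f"{func.__qualname__}:{args!r}:{sorted(kwargs.items())!r}"
            # Blocking read runs in the default thread pool, so the event loop stays free.
            cached = await asyncio.to_thread(_blocking_lookup, key)
            if cached is not None:
                return cached
            result = await func(*args, **kwargs)
            # Fire-and-forget write-back, as in asyncio.create_task(asyncio.to_thread(...)).
            asyncio.create_task(asyncio.to_thread(_blocking_save, key, result))
            return result

        return async_wrapper

    @functools.wraps(func)
    def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
        key = f"{func.__qualname__}:{args!r}:{sorted(kwargs.items())!r}"
        cached = _blocking_lookup(key)
        if cached is not None:
            return cached
        result = func(*args, **kwargs)
        _blocking_save(key, result)
        return result

    return sync_wrapper


@toy_db_cache
async def fetch_probability(market_id: str) -> float:
    await asyncio.sleep(0.1)  # stand-in for a remote call
    return 0.42


async def main() -> None:
    print(await fetch_probability("market-1"))  # miss: computes, schedules a background save
    await asyncio.sleep(0.2)                    # give the fire-and-forget save time to finish
    print(await fetch_probability("market-1"))  # hit: served from the toy cache


if __name__ == "__main__":
    asyncio.run(main())

The trade-off sketched here matches the diff: a cache miss returns as soon as the wrapped coroutine finishes, while the save runs in the background, so a crash before the scheduled task executes simply loses that cache entry rather than delaying the caller.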
{prediction_market_agent_tooling-0.69.8.dist-info → prediction_market_agent_tooling-0.69.9.dist-info}/RECORD

@@ -89,7 +89,7 @@ prediction_market_agent_tooling/tools/balances.py,sha256=Osab21btfJDw2Y-jT_TV-KH
 prediction_market_agent_tooling/tools/betting_strategies/kelly_criterion.py,sha256=o5ba633gKiDqV4t_C2d9FWwH-HkRAOZd8FcZTYvbj6g,14451
 prediction_market_agent_tooling/tools/betting_strategies/stretch_bet_between.py,sha256=THMXwFlskvzbjnX_OiYtDSzI8XVFyULWfP2525_9UGc,429
 prediction_market_agent_tooling/tools/betting_strategies/utils.py,sha256=MpS3FOMn0C7nbmbQRUT9QwSh3UzzsgGczP91iSMr9wo,261
-prediction_market_agent_tooling/tools/caches/db_cache.py,sha256=
+prediction_market_agent_tooling/tools/caches/db_cache.py,sha256=V6o6UdesjkKzSJMhqkUtD76cJGPaNhuwA4OL2chIYSI,13801
 prediction_market_agent_tooling/tools/caches/inmemory_cache.py,sha256=ZW5iI5rmjqeAebu5T7ftRnlkxiL02IC-MxCfDB80x7w,1506
 prediction_market_agent_tooling/tools/caches/serializers.py,sha256=vFDx4fsPxclXp2q0sv27j4al_M_Tj9aR2JJP-xNHQXA,2151
 prediction_market_agent_tooling/tools/contract.py,sha256=BzpAFcbKl_KqwgAlaXx63Fg8jzr0EO3qEeOs1K11CPA,33905
@@ -137,8 +137,8 @@ prediction_market_agent_tooling/tools/tokens/usd.py,sha256=DPO-4HBTy1-TZHKL_9CnH
 prediction_market_agent_tooling/tools/transaction_cache.py,sha256=K5YKNL2_tR10Iw2TD9fuP-CTGpBbZtNdgbd0B_R7pjg,1814
 prediction_market_agent_tooling/tools/utils.py,sha256=ruq6P5TFs8CBHxeBLj1Plpx7kuNFPpDgMsJGQgDiRNs,8785
 prediction_market_agent_tooling/tools/web3_utils.py,sha256=CDbaidlLeQ4VHzSg150L7QNfHfGveljSePGuDVFEYqc,13963
-prediction_market_agent_tooling-0.69.
-prediction_market_agent_tooling-0.69.
-prediction_market_agent_tooling-0.69.
-prediction_market_agent_tooling-0.69.
-prediction_market_agent_tooling-0.69.
+prediction_market_agent_tooling-0.69.9.dist-info/METADATA,sha256=fBezbmaBxLVzpMNtCsmCnE2Pc8ywbXJTnCRF7HMJcYw,8890
+prediction_market_agent_tooling-0.69.9.dist-info/WHEEL,sha256=M5asmiAlL6HEcOq52Yi5mmk9KmTVjY2RDPtO4p9DMrc,88
+prediction_market_agent_tooling-0.69.9.dist-info/entry_points.txt,sha256=m8PukHbeH5g0IAAmOf_1Ahm-sGAMdhSSRQmwtpmi2s8,81
+prediction_market_agent_tooling-0.69.9.dist-info/licenses/LICENSE,sha256=6or154nLLU6bELzjh0mCreFjt0m2v72zLi3yHE0QbeE,7650
+prediction_market_agent_tooling-0.69.9.dist-info/RECORD,,
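One smaller detail from the db_cache.py change above: _build_context now prefers typing.get_type_hints over the raw __annotations__ dict, so that string and forward-reference return annotations resolve to real types before the Pydantic check. A minimal illustration of the difference, under the assumption of Python 3.10+; fair_price and market_id are hypothetical names used only for this example.

from __future__ import annotations  # PEP 563: every annotation becomes a plain string

from typing import get_type_hints


def fair_price(market_id: str) -> float:
    return 0.5


# Raw annotations keep the unevaluated string, which a type-based check cannot inspect.
print(repr(fair_price.__annotations__["return"]))  # 'float'  (just a string)

# get_type_hints evaluates the string against the function's globals.
print(repr(get_type_hints(fair_price)["return"]))  # <class 'float'>  (the real type)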