prediction-market-agent-tooling 0.65.5__py3-none-any.whl → 0.69.17.dev1149__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. prediction_market_agent_tooling/abis/agentresultmapping.abi.json +192 -0
  2. prediction_market_agent_tooling/abis/erc1155.abi.json +352 -0
  3. prediction_market_agent_tooling/abis/processor.abi.json +16 -0
  4. prediction_market_agent_tooling/abis/swapr_quoter.abi.json +221 -0
  5. prediction_market_agent_tooling/abis/swapr_router.abi.json +634 -0
  6. prediction_market_agent_tooling/benchmark/benchmark.py +1 -1
  7. prediction_market_agent_tooling/benchmark/utils.py +13 -0
  8. prediction_market_agent_tooling/chains.py +1 -0
  9. prediction_market_agent_tooling/config.py +61 -2
  10. prediction_market_agent_tooling/data_download/langfuse_data_downloader.py +405 -0
  11. prediction_market_agent_tooling/deploy/agent.py +199 -67
  12. prediction_market_agent_tooling/deploy/agent_example.py +1 -1
  13. prediction_market_agent_tooling/deploy/betting_strategy.py +412 -68
  14. prediction_market_agent_tooling/deploy/constants.py +6 -0
  15. prediction_market_agent_tooling/gtypes.py +11 -1
  16. prediction_market_agent_tooling/jobs/jobs_models.py +2 -2
  17. prediction_market_agent_tooling/jobs/omen/omen_jobs.py +19 -20
  18. prediction_market_agent_tooling/loggers.py +9 -1
  19. prediction_market_agent_tooling/logprobs_parser.py +2 -1
  20. prediction_market_agent_tooling/markets/agent_market.py +106 -18
  21. prediction_market_agent_tooling/markets/blockchain_utils.py +37 -19
  22. prediction_market_agent_tooling/markets/data_models.py +120 -7
  23. prediction_market_agent_tooling/markets/manifold/data_models.py +5 -3
  24. prediction_market_agent_tooling/markets/manifold/manifold.py +21 -2
  25. prediction_market_agent_tooling/markets/manifold/utils.py +8 -2
  26. prediction_market_agent_tooling/markets/market_type.py +74 -0
  27. prediction_market_agent_tooling/markets/markets.py +7 -99
  28. prediction_market_agent_tooling/markets/metaculus/data_models.py +3 -3
  29. prediction_market_agent_tooling/markets/metaculus/metaculus.py +5 -8
  30. prediction_market_agent_tooling/markets/omen/cow_contracts.py +5 -1
  31. prediction_market_agent_tooling/markets/omen/data_models.py +63 -32
  32. prediction_market_agent_tooling/markets/omen/omen.py +112 -23
  33. prediction_market_agent_tooling/markets/omen/omen_constants.py +8 -0
  34. prediction_market_agent_tooling/markets/omen/omen_contracts.py +18 -203
  35. prediction_market_agent_tooling/markets/omen/omen_resolving.py +33 -13
  36. prediction_market_agent_tooling/markets/omen/omen_subgraph_handler.py +23 -18
  37. prediction_market_agent_tooling/markets/polymarket/api.py +123 -100
  38. prediction_market_agent_tooling/markets/polymarket/clob_manager.py +156 -0
  39. prediction_market_agent_tooling/markets/polymarket/constants.py +15 -0
  40. prediction_market_agent_tooling/markets/polymarket/data_models.py +95 -19
  41. prediction_market_agent_tooling/markets/polymarket/polymarket.py +373 -29
  42. prediction_market_agent_tooling/markets/polymarket/polymarket_contracts.py +35 -0
  43. prediction_market_agent_tooling/markets/polymarket/polymarket_subgraph_handler.py +91 -0
  44. prediction_market_agent_tooling/markets/polymarket/utils.py +1 -22
  45. prediction_market_agent_tooling/markets/seer/data_models.py +111 -17
  46. prediction_market_agent_tooling/markets/seer/exceptions.py +2 -0
  47. prediction_market_agent_tooling/markets/seer/price_manager.py +165 -50
  48. prediction_market_agent_tooling/markets/seer/seer.py +393 -106
  49. prediction_market_agent_tooling/markets/seer/seer_api.py +28 -0
  50. prediction_market_agent_tooling/markets/seer/seer_contracts.py +115 -5
  51. prediction_market_agent_tooling/markets/seer/seer_subgraph_handler.py +297 -66
  52. prediction_market_agent_tooling/markets/seer/subgraph_data_models.py +43 -8
  53. prediction_market_agent_tooling/markets/seer/swap_pool_handler.py +80 -0
  54. prediction_market_agent_tooling/tools/_generic_value.py +8 -2
  55. prediction_market_agent_tooling/tools/betting_strategies/kelly_criterion.py +271 -8
  56. prediction_market_agent_tooling/tools/betting_strategies/utils.py +6 -1
  57. prediction_market_agent_tooling/tools/caches/db_cache.py +219 -117
  58. prediction_market_agent_tooling/tools/caches/serializers.py +11 -2
  59. prediction_market_agent_tooling/tools/contract.py +480 -38
  60. prediction_market_agent_tooling/tools/contract_utils.py +61 -0
  61. prediction_market_agent_tooling/tools/cow/cow_order.py +218 -45
  62. prediction_market_agent_tooling/tools/cow/models.py +122 -0
  63. prediction_market_agent_tooling/tools/cow/semaphore.py +104 -0
  64. prediction_market_agent_tooling/tools/datetime_utc.py +14 -2
  65. prediction_market_agent_tooling/tools/db/db_manager.py +59 -0
  66. prediction_market_agent_tooling/tools/hexbytes_custom.py +4 -1
  67. prediction_market_agent_tooling/tools/httpx_cached_client.py +15 -6
  68. prediction_market_agent_tooling/tools/langfuse_client_utils.py +21 -8
  69. prediction_market_agent_tooling/tools/openai_utils.py +31 -0
  70. prediction_market_agent_tooling/tools/perplexity/perplexity_client.py +86 -0
  71. prediction_market_agent_tooling/tools/perplexity/perplexity_models.py +26 -0
  72. prediction_market_agent_tooling/tools/perplexity/perplexity_search.py +73 -0
  73. prediction_market_agent_tooling/tools/rephrase.py +71 -0
  74. prediction_market_agent_tooling/tools/singleton.py +11 -6
  75. prediction_market_agent_tooling/tools/streamlit_utils.py +188 -0
  76. prediction_market_agent_tooling/tools/tokens/auto_deposit.py +64 -0
  77. prediction_market_agent_tooling/tools/tokens/auto_withdraw.py +8 -0
  78. prediction_market_agent_tooling/tools/tokens/slippage.py +21 -0
  79. prediction_market_agent_tooling/tools/tokens/usd.py +5 -2
  80. prediction_market_agent_tooling/tools/utils.py +61 -3
  81. prediction_market_agent_tooling/tools/web3_utils.py +63 -9
  82. {prediction_market_agent_tooling-0.65.5.dist-info → prediction_market_agent_tooling-0.69.17.dev1149.dist-info}/METADATA +13 -9
  83. {prediction_market_agent_tooling-0.65.5.dist-info → prediction_market_agent_tooling-0.69.17.dev1149.dist-info}/RECORD +86 -64
  84. {prediction_market_agent_tooling-0.65.5.dist-info → prediction_market_agent_tooling-0.69.17.dev1149.dist-info}/WHEEL +1 -1
  85. prediction_market_agent_tooling/abis/omen_agentresultmapping.abi.json +0 -171
  86. prediction_market_agent_tooling/markets/polymarket/data_models_web.py +0 -420
  87. {prediction_market_agent_tooling-0.65.5.dist-info → prediction_market_agent_tooling-0.69.17.dev1149.dist-info}/entry_points.txt +0 -0
  88. {prediction_market_agent_tooling-0.65.5.dist-info → prediction_market_agent_tooling-0.69.17.dev1149.dist-info/licenses}/LICENSE +0 -0
@@ -2,6 +2,7 @@ import json
2
2
  import typing as t
3
3
  from copy import deepcopy
4
4
 
5
+ import cachetools
5
6
  from eth_account.signers.local import LocalAccount
6
7
  from eth_typing import URI
7
8
  from pydantic import Field, model_validator
@@ -12,8 +13,13 @@ from safe_eth.eth import EthereumClient
12
13
  from safe_eth.safe.safe import SafeV141
13
14
  from web3 import Account, Web3
14
15
  from web3._utils.http import construct_user_agent
16
+ from web3.middleware import ExtraDataToPOAMiddleware
15
17
 
16
- from prediction_market_agent_tooling.chains import ETHEREUM_ID, GNOSIS_CHAIN_ID
18
+ from prediction_market_agent_tooling.chains import (
19
+ ETHEREUM_ID,
20
+ GNOSIS_CHAIN_ID,
21
+ POLYGON_CHAIN_ID,
22
+ )
17
23
  from prediction_market_agent_tooling.deploy.gcp.utils import gcp_get_secret_value
18
24
  from prediction_market_agent_tooling.gtypes import (
19
25
  ChainID,
@@ -65,6 +71,12 @@ class APIKeys(BaseSettings):
65
71
 
66
72
  SQLALCHEMY_DB_URL: t.Optional[SecretStr] = None
67
73
 
74
+ PERPLEXITY_API_KEY: t.Optional[SecretStr] = None
75
+
76
+ DUNE_API_KEY: t.Optional[SecretStr] = None
77
+
78
+ SLACK_WEBHOOK_URL: t.Optional[SecretStr] = None
79
+
68
80
  ENABLE_CACHE: bool = False
69
81
  CACHE_DIR: str = "./.cache"
70
82
 
@@ -246,6 +258,18 @@ class APIKeys(BaseSettings):
246
258
  self.SQLALCHEMY_DB_URL, "SQLALCHEMY_DB_URL missing in the environment."
247
259
  )
248
260
 
261
+ @property
262
+ def dune_api_key(self) -> SecretStr:
263
+ return check_not_none(
264
+ self.DUNE_API_KEY, "DUNE_API_KEY missing in the environment."
265
+ )
266
+
267
+ @property
268
+ def slack_webhook_url(self) -> SecretStr:
269
+ return check_not_none(
270
+ self.SLACK_WEBHOOK_URL, "SLACK_WEBHOOK_URL missing in the environment."
271
+ )
272
+
249
273
  def get_account(self) -> LocalAccount:
250
274
  acc: LocalAccount = Account.from_key(
251
275
  self.bet_from_private_key.get_secret_value()
@@ -259,6 +283,12 @@ class APIKeys(BaseSettings):
259
283
  if self.model_fields[k].annotation not in SECRET_TYPES and v is not None
260
284
  }
261
285
 
286
+ @property
287
+ def perplexity_api_key(self) -> SecretStr:
288
+ return check_not_none(
289
+ self.PERPLEXITY_API_KEY, "PERPLEXITY_API_KEY missing in the environment."
290
+ )
291
+
262
292
  def model_dump_secrets(self) -> dict[str, t.Any]:
263
293
  return {
264
294
  k: v.get_secret_value() if isinstance(v, SecretStr) else v
@@ -285,6 +315,8 @@ class RPCConfig(BaseSettings):
285
315
  GNOSIS_RPC_URL: URI = Field(default=URI("https://rpc.gnosis.gateway.fm"))
286
316
  GNOSIS_RPC_BEARER: SecretStr | None = None
287
317
  CHAIN_ID: ChainID = Field(default=GNOSIS_CHAIN_ID)
318
+ POLYGON_RPC_URL: URI = Field(default=URI("https://polygon-rpc.com"))
319
+ POLYGON_RPC_BEARER: SecretStr | None = None
288
320
 
289
321
  @property
290
322
  def ethereum_rpc_url(self) -> URI:
@@ -298,26 +330,40 @@ class RPCConfig(BaseSettings):
298
330
  self.GNOSIS_RPC_URL, "GNOSIS_RPC_URL missing in the environment."
299
331
  )
300
332
 
333
+ @property
334
+ def polygon_rpc_url(self) -> URI:
335
+ return check_not_none(
336
+ self.POLYGON_RPC_URL, "POLYGON_RPC_URL missing in the environment."
337
+ )
338
+
301
339
  @property
302
340
  def chain_id(self) -> ChainID:
303
341
  return check_not_none(self.CHAIN_ID, "CHAIN_ID missing in the environment.")
304
342
 
343
+ @property
344
+ def gnosis_chain_id(self) -> ChainID:
345
+ return GNOSIS_CHAIN_ID
346
+
305
347
  def chain_id_to_rpc_url(self, chain_id: ChainID) -> URI:
306
348
  return {
307
349
  ETHEREUM_ID: self.ethereum_rpc_url,
308
350
  GNOSIS_CHAIN_ID: self.gnosis_rpc_url,
351
+ POLYGON_CHAIN_ID: self.polygon_rpc_url,
309
352
  }[chain_id]
310
353
 
311
354
  def chain_id_to_rpc_bearer(self, chain_id: ChainID) -> SecretStr | None:
312
355
  return {
313
356
  ETHEREUM_ID: self.ETHEREUM_RPC_BEARER,
314
357
  GNOSIS_CHAIN_ID: self.GNOSIS_RPC_BEARER,
358
+ POLYGON_CHAIN_ID: self.POLYGON_RPC_BEARER,
315
359
  }[chain_id]
316
360
 
317
361
  def get_web3(self) -> Web3:
318
362
  headers = {
319
363
  "Content-Type": "application/json",
320
- "User-Agent": construct_user_agent(str(type(self))),
364
+ "User-Agent": construct_user_agent(
365
+ str(type(self)), self.__class__.__name__
366
+ ),
321
367
  }
322
368
  if bearer := self.chain_id_to_rpc_bearer(self.chain_id):
323
369
  headers["Authorization"] = f"Bearer {bearer.get_secret_value()}"
@@ -331,6 +377,19 @@ class RPCConfig(BaseSettings):
331
377
  )
332
378
  )
333
379
 
380
+ @cachetools.cached(
381
+ cachetools.TTLCache(maxsize=100, ttl=5 * 60),
382
+ key=lambda self: f"{self.model_dump_json()}",
383
+ )
384
+ def get_polygon_web3(self) -> Web3:
385
+ web3 = self.get_web3()
386
+ if self.chain_id != POLYGON_CHAIN_ID:
387
+ raise ValueError(f"Chain ID {self.chain_id} is not Polygon Mainnet")
388
+
389
+ # We need to inject middleware into the Polygon web3 instance (https://web3py.readthedocs.io/en/stable/middleware.html#proof-of-authority)
390
+ web3.middleware_onion.inject(ExtraDataToPOAMiddleware, layer=0)
391
+ return web3
392
+
334
393
 
335
394
  class CloudCredentials(BaseSettings):
336
395
  model_config = SettingsConfigDict(
@@ -0,0 +1,405 @@
1
+ import json
2
+ import os
3
+ from concurrent.futures import ThreadPoolExecutor, as_completed
4
+ from datetime import datetime, timedelta
5
+ from pathlib import Path
6
+ from typing import Any
7
+
8
+ import pandas as pd
9
+ import typer
10
+ from langfuse import Langfuse
11
+ from langfuse.client import TraceWithDetails
12
+ from pydantic import BaseModel
13
+
14
+ from prediction_market_agent_tooling.config import APIKeys
15
+ from prediction_market_agent_tooling.gtypes import DatetimeUTC, OutcomeStr, OutcomeToken
16
+ from prediction_market_agent_tooling.loggers import logger
17
+ from prediction_market_agent_tooling.markets.agent_market import AgentMarket
18
+ from prediction_market_agent_tooling.markets.data_models import Resolution
19
+ from prediction_market_agent_tooling.markets.market_type import MarketType
20
+ from prediction_market_agent_tooling.markets.omen.omen import OmenAgentMarket
21
+ from prediction_market_agent_tooling.markets.seer.seer import SeerAgentMarket
22
+ from prediction_market_agent_tooling.markets.seer.seer_subgraph_handler import (
23
+ SeerSubgraphHandler,
24
+ )
25
+ from prediction_market_agent_tooling.tools.hexbytes_custom import HexBytes
26
+ from prediction_market_agent_tooling.tools.httpx_cached_client import HttpxCachedClient
27
+ from prediction_market_agent_tooling.tools.langfuse_client_utils import (
28
+ get_traces_for_agent,
29
+ )
30
+
31
+ PREDICTION_STATES = [
32
+ "predict_market",
33
+ "_make_prediction_categorical",
34
+ "make_prediction",
35
+ ]
36
+ REPORT_STATES = ["prepare_report"]
37
+
38
+ TRADE_STATES = ["build_trades"]
39
+
40
+ MARKET_RESOLUTION_PROVIDERS = {
41
+ MarketType.OMEN: lambda market_id: OmenAgentMarket.get_binary_market(market_id),
42
+ MarketType.SEER: lambda market_id: SeerAgentMarket.from_data_model_with_subgraph(
43
+ model=SeerSubgraphHandler().get_market_by_id(HexBytes(market_id)),
44
+ seer_subgraph=SeerSubgraphHandler(),
45
+ must_have_prices=False,
46
+ ),
47
+ }
48
+
49
+
50
+ class TraceResult(BaseModel):
51
+ agent_name: str
52
+ trace_id: str
53
+ market_id: str
54
+ market_type: str
55
+ market_question: str
56
+ market_outcomes: list[str]
57
+ market_outcome_token_pool: dict[OutcomeStr, OutcomeToken] | None
58
+ market_created_time: DatetimeUTC | None
59
+ market_close_time: DatetimeUTC | None
60
+ analysis: str
61
+ prediction_reasoning: str
62
+ prediction_decision: str
63
+ prediction_p_yes: float
64
+ prediction_info_utility: float
65
+ prediction_confidence: float
66
+ market_resolution: str | None
67
+ resolution_is_valid: bool | None
68
+ full_market_json: str | None
69
+ prediction_json: str
70
+ trades: list[dict[str, Any]] | None
71
+
72
+
73
+ def get_langfuse_client() -> Langfuse:
74
+ api_keys = APIKeys()
75
+ return Langfuse(
76
+ secret_key=api_keys.langfuse_secret_key.get_secret_value(),
77
+ public_key=api_keys.langfuse_public_key,
78
+ host=api_keys.langfuse_host,
79
+ httpx_client=HttpxCachedClient().get_client(),
80
+ )
81
+
82
+
83
+ def create_output_file_path(
84
+ agent_name: str,
85
+ date_from: DatetimeUTC,
86
+ date_to: DatetimeUTC,
87
+ output_folder: str,
88
+ ) -> str:
89
+ """Create unique output file path, incrementing version if file exists."""
90
+ Path(output_folder).mkdir(parents=True, exist_ok=True)
91
+
92
+ default_file_name = f"{agent_name}_{date_from.date()}_{date_to.date()}"
93
+ output_file = os.path.join(output_folder, f"{default_file_name}.csv")
94
+
95
+ index = 0
96
+ while os.path.exists(output_file):
97
+ index += 1
98
+ output_file = os.path.join(output_folder, f"{default_file_name}_v{index}.csv")
99
+
100
+ return output_file
101
+
102
+
103
+ def download_data_daily(
104
+ agent_name: str,
105
+ date_from: DatetimeUTC,
106
+ date_to: DatetimeUTC,
107
+ only_resolved: bool,
108
+ output_file: str,
109
+ append_mode: bool = False,
110
+ ) -> tuple[int, int]:
111
+ """Download data for a single day/period and return (traces_downloaded, records_saved)."""
112
+ langfuse_client_for_traces = get_langfuse_client()
113
+
114
+ logger.info(f"Processing data for {date_from.date()} to {date_to.date()}")
115
+
116
+ traces = get_traces_for_agent(
117
+ agent_name=agent_name,
118
+ trace_name="process_market",
119
+ from_timestamp=date_from,
120
+ to_timestamp=date_to,
121
+ has_output=True,
122
+ client=langfuse_client_for_traces,
123
+ tags=["answered"],
124
+ )
125
+
126
+ traces_count = len(traces) if traces else 0
127
+ if not traces:
128
+ logger.info(f"No traces found for {date_from.date()}")
129
+ # If this is the first call and no traces, create empty CSV with header
130
+ if not append_mode:
131
+ df_empty = pd.DataFrame(columns=list(TraceResult.model_fields.keys()))
132
+ df_empty.to_csv(output_file, mode="w", header=True, index=False)
133
+ return 0, 0
134
+
135
+ # Use ThreadPoolExecutor with shared client (thread-safe)
136
+ results = []
137
+ with ThreadPoolExecutor(max_workers=3) as executor:
138
+ # Submit all tasks
139
+ future_to_trace = {
140
+ executor.submit(
141
+ process_trace, trace, only_resolved, langfuse_client_for_traces
142
+ ): trace
143
+ for trace in traces
144
+ }
145
+
146
+ # Collect results as they complete
147
+ for future in as_completed(future_to_trace):
148
+ try:
149
+ result = future.result()
150
+ results.append(result)
151
+ except Exception as e:
152
+ trace = future_to_trace[future]
153
+ logger.exception(f"Error processing trace {trace.id}: {e}")
154
+ results.append(None)
155
+
156
+ successful_results = [r for r in results if r is not None]
157
+ if successful_results:
158
+ results_data = [result.model_dump() for result in successful_results]
159
+ df = pd.DataFrame(results_data)
160
+
161
+ df.to_csv(
162
+ output_file,
163
+ mode="a" if append_mode else "w",
164
+ header=not append_mode,
165
+ index=False,
166
+ )
167
+ logger.info(f"Saved {len(successful_results)} records for {date_from.date()}")
168
+ elif not append_mode:
169
+ df_empty = pd.DataFrame(columns=list(TraceResult.model_fields.keys()))
170
+ df_empty.to_csv(output_file, mode="w", header=True, index=False)
171
+
172
+ return traces_count, len(successful_results)
173
+
174
+
175
+ def download_data(
176
+ agent_name: str,
177
+ date_from: DatetimeUTC,
178
+ date_to: DatetimeUTC,
179
+ only_resolved: bool,
180
+ output_folder: str,
181
+ ) -> None:
182
+ output_file = create_output_file_path(agent_name, date_from, date_to, output_folder)
183
+ total_traces = 0
184
+ total_saved = 0
185
+ daily_stats = []
186
+
187
+ current_date = date_from
188
+ first_call = True
189
+
190
+ while current_date < date_to:
191
+ next_date = DatetimeUTC.from_datetime(current_date + timedelta(days=1))
192
+ if next_date > date_to:
193
+ next_date = date_to
194
+
195
+ traces_downloaded, records_saved = download_data_daily(
196
+ agent_name=agent_name,
197
+ date_from=current_date,
198
+ date_to=next_date,
199
+ only_resolved=only_resolved,
200
+ output_file=output_file,
201
+ append_mode=not first_call,
202
+ )
203
+
204
+ daily_stats.append(
205
+ {
206
+ "date": current_date.date(),
207
+ "traces_downloaded": traces_downloaded,
208
+ "records_saved": records_saved,
209
+ }
210
+ )
211
+
212
+ total_traces += traces_downloaded
213
+ total_saved += records_saved
214
+ first_call = False
215
+ current_date = next_date
216
+
217
+ # Print daily report
218
+ logger.info("=" * 60)
219
+ logger.info("DAILY PROCESSING REPORT")
220
+ logger.info("=" * 60)
221
+ for stats in daily_stats:
222
+ total_traces_downloaded = int(stats["traces_downloaded"]) # type: ignore
223
+ total_records_saved = int(stats["records_saved"]) # type: ignore
224
+ success_rate = (
225
+ (total_records_saved / total_traces_downloaded * 100)
226
+ if total_traces_downloaded > 0
227
+ else 0
228
+ )
229
+ logger.info(
230
+ f"{stats['date']}: {total_traces_downloaded} traces downloaded, {total_records_saved} successfully processed ({success_rate:.1f}%)"
231
+ )
232
+
233
+ logger.info("=" * 60)
234
+ logger.info("OVERALL SUMMARY")
235
+ logger.info("=" * 60)
236
+ overall_success_rate = (total_saved / total_traces * 100) if total_traces > 0 else 0
237
+ logger.info(f"Total traces downloaded: {total_traces}")
238
+ logger.info(f"Total records saved: {total_saved}")
239
+ logger.info(f"Overall success rate: {overall_success_rate:.1f}%")
240
+
241
+ if total_saved == 0:
242
+ logger.warning("No results to save")
243
+ else:
244
+ logger.info(f"Output file: {output_file}")
245
+ logger.info("=" * 60)
246
+
247
+
248
+ def process_trace(
249
+ trace: TraceWithDetails,
250
+ only_resolved: bool,
251
+ langfuse_client: Langfuse,
252
+ include_market: bool = True,
253
+ ) -> TraceResult | None:
254
+ try:
255
+ logger.info(f"Processing trace {trace.id}")
256
+ observations = langfuse_client.fetch_observations(trace_id=trace.id)
257
+ logger.info(f"Observations downloaded for trace {trace.id}")
258
+ market_state, market_type = get_agent_market_state(trace.input)
259
+
260
+ prepare_report_obs = [
261
+ obs for obs in observations.data if obs.name in REPORT_STATES
262
+ ]
263
+ predict_market_obs = [
264
+ obs for obs in observations.data if obs.name in PREDICTION_STATES
265
+ ]
266
+ build_trades_obs = [
267
+ obs for obs in observations.data if obs.name in TRADE_STATES
268
+ ]
269
+ if not prepare_report_obs or not predict_market_obs:
270
+ raise ValueError(f"Missing required observations for trace {trace.id}")
271
+
272
+ analysis = prepare_report_obs[0].output
273
+ prediction = predict_market_obs[0].output
274
+
275
+ resolution = get_market_resolution(market_state.id, market_type)
276
+
277
+ if only_resolved and not resolution:
278
+ raise ValueError(f"No resolution found for market {market_state.id}")
279
+
280
+ result = TraceResult(
281
+ agent_name=trace.metadata["agent_class"],
282
+ trace_id=trace.id,
283
+ market_id=market_state.id,
284
+ market_type=market_type.value,
285
+ market_question=market_state.question,
286
+ market_outcomes=list(market_state.outcomes),
287
+ market_outcome_token_pool=market_state.outcome_token_pool,
288
+ market_created_time=market_state.created_time,
289
+ market_close_time=market_state.close_time,
290
+ analysis=analysis,
291
+ prediction_reasoning=prediction["reasoning"],
292
+ prediction_decision="y" if prediction["p_yes"] > 0.5 else "n",
293
+ prediction_p_yes=prediction["p_yes"],
294
+ prediction_info_utility=prediction["info_utility"],
295
+ prediction_confidence=prediction["confidence"],
296
+ prediction_json=json.dumps(prediction),
297
+ market_resolution=resolution.outcome if resolution else None,
298
+ resolution_is_valid=not resolution.invalid if resolution else None,
299
+ full_market_json=market_state.model_dump_json() if include_market else None,
300
+ trades=build_trades_obs[0].output if build_trades_obs else None,
301
+ )
302
+ logger.info(f"Downloaded trace {trace.id} finished")
303
+ return result
304
+
305
+ except Exception as e:
306
+ logger.exception(f"Error processing trace {trace.id}: {e}")
307
+ return None
308
+
309
+
310
+ def get_agent_market_state(
311
+ input_data: dict[str, Any]
312
+ ) -> tuple[AgentMarket, MarketType]:
313
+ if not input_data or "args" not in input_data:
314
+ raise ValueError("Invalid input data: missing args")
315
+
316
+ args = input_data["args"]
317
+ if len(args) < 2:
318
+ raise ValueError("Invalid args: expected at least 2 elements")
319
+
320
+ market_type = MarketType(args[0])
321
+ if market_type not in MARKET_RESOLUTION_PROVIDERS:
322
+ raise ValueError(f"Unknown market type: {market_type}")
323
+
324
+ market_data = args[1] # market object data
325
+
326
+ # recreate probabilities if not present
327
+ if "outcome_token_pool" in market_data and "probabilities" not in market_data:
328
+ market_data["probabilities"] = AgentMarket.build_probability_map(
329
+ [
330
+ OutcomeToken(
331
+ float(value["value"]) if isinstance(value, dict) else float(value)
332
+ ).as_outcome_wei
333
+ for value in market_data["outcome_token_pool"].values()
334
+ ],
335
+ list(market_data["outcome_token_pool"].keys()),
336
+ )
337
+
338
+ if market_type == MarketType.OMEN:
339
+ return OmenAgentMarket.model_validate(market_data), market_type
340
+ elif market_type == MarketType.SEER:
341
+ return SeerAgentMarket.model_validate(market_data), market_type
342
+ else:
343
+ return AgentMarket.model_validate(market_data), market_type
344
+
345
+
346
+ def get_market_resolution(market_id: str, market_type: MarketType) -> Resolution:
347
+ if market_type not in MARKET_RESOLUTION_PROVIDERS:
348
+ raise ValueError(f"Unknown market type: {market_type.market_class}")
349
+
350
+ try:
351
+ market: AgentMarket | None = MARKET_RESOLUTION_PROVIDERS[market_type](market_id)
352
+
353
+ if not market or not market.resolution:
354
+ raise ValueError(f"No resolution found for market: {market_id}")
355
+
356
+ return market.resolution
357
+
358
+ except Exception as e:
359
+ raise ValueError(
360
+ f"Failed to fetch {market_type.market_class} market {market_id} resolution: {e}"
361
+ ) from e
362
+
363
+
364
+ def parse_date(date_str: str, param_name: str) -> DatetimeUTC:
365
+ try:
366
+ return DatetimeUTC.to_datetime_utc(date_str)
367
+ except ValueError as e:
368
+ typer.echo(f"Error: Invalid date format for {param_name}: {date_str}")
369
+ typer.echo("Expected format: YYYY-MM-DD or YYYY-MM-DDTHH:MM:SS")
370
+ raise typer.Exit(1) from e
371
+
372
+
373
+ def main(
374
+ agent_name: str = "DeployablePredictionProphet",
375
+ only_resolved: bool = True,
376
+ date_from: str = typer.Option(
377
+ None, help="Start date in ISO format (YYYY-MM-DD or YYYY-MM-DDTHH:MM:SS)"
378
+ ),
379
+ date_to: str = typer.Option(
380
+ None, help="End date in ISO format (YYYY-MM-DD or YYYY-MM-DDTHH:MM:SS)"
381
+ ),
382
+ output_folder: str = "./agent_trades_output/",
383
+ ) -> None:
384
+ date_from_dt = (
385
+ parse_date(date_from, "date_from")
386
+ if date_from
387
+ else DatetimeUTC.from_datetime(datetime.now() - timedelta(days=1))
388
+ )
389
+ date_to_dt = (
390
+ parse_date(date_to, "date_to")
391
+ if date_to
392
+ else DatetimeUTC.from_datetime(datetime.now())
393
+ )
394
+
395
+ download_data(
396
+ agent_name=agent_name,
397
+ date_from=date_from_dt,
398
+ date_to=date_to_dt,
399
+ only_resolved=only_resolved,
400
+ output_folder=output_folder,
401
+ )
402
+
403
+
404
+ if __name__ == "__main__":
405
+ typer.run(main)