prediction-market-agent-tooling 0.65.12__py3-none-any.whl → 0.66.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prediction_market_agent_tooling/abis/agentresultmapping.abi.json +192 -0
- prediction_market_agent_tooling/data_download/langfuse_data_downloader.py +188 -51
- prediction_market_agent_tooling/deploy/agent.py +26 -2
- prediction_market_agent_tooling/markets/agent_market.py +0 -6
- prediction_market_agent_tooling/markets/blockchain_utils.py +29 -13
- prediction_market_agent_tooling/markets/data_models.py +15 -4
- prediction_market_agent_tooling/markets/metaculus/metaculus.py +1 -8
- prediction_market_agent_tooling/markets/omen/data_models.py +43 -13
- prediction_market_agent_tooling/markets/omen/omen.py +5 -1
- prediction_market_agent_tooling/markets/omen/omen_constants.py +5 -0
- prediction_market_agent_tooling/markets/omen/omen_contracts.py +14 -6
- prediction_market_agent_tooling/markets/seer/seer.py +13 -1
- {prediction_market_agent_tooling-0.65.12.dist-info → prediction_market_agent_tooling-0.66.0.dist-info}/METADATA +1 -1
- {prediction_market_agent_tooling-0.65.12.dist-info → prediction_market_agent_tooling-0.66.0.dist-info}/RECORD +17 -17
- prediction_market_agent_tooling/abis/omen_agentresultmapping.abi.json +0 -171
- {prediction_market_agent_tooling-0.65.12.dist-info → prediction_market_agent_tooling-0.66.0.dist-info}/LICENSE +0 -0
- {prediction_market_agent_tooling-0.65.12.dist-info → prediction_market_agent_tooling-0.66.0.dist-info}/WHEEL +0 -0
- {prediction_market_agent_tooling-0.65.12.dist-info → prediction_market_agent_tooling-0.66.0.dist-info}/entry_points.txt +0 -0
prediction_market_agent_tooling/abis/agentresultmapping.abi.json

@@ -0,0 +1,192 @@
+[
+  {
+    "inputs": [
+      {
+        "internalType": "string",
+        "name": "_marketPlatformName",
+        "type": "string"
+      }
+    ],
+    "stateMutability": "nonpayable",
+    "type": "constructor"
+  },
+  {
+    "anonymous": false,
+    "inputs": [
+      {
+        "indexed": true,
+        "internalType": "address",
+        "name": "marketAddress",
+        "type": "address"
+      },
+      {
+        "indexed": true,
+        "internalType": "address",
+        "name": "publisherAddress",
+        "type": "address"
+      },
+      {
+        "indexed": false,
+        "internalType": "string[]",
+        "name": "outcomes",
+        "type": "string[]"
+      },
+      {
+        "indexed": false,
+        "internalType": "uint16[]",
+        "name": "estimatedProbabilitiesBps",
+        "type": "uint16[]"
+      },
+      {
+        "indexed": false,
+        "internalType": "bytes32[]",
+        "name": "txHashes",
+        "type": "bytes32[]"
+      },
+      {
+        "indexed": false,
+        "internalType": "bytes32",
+        "name": "ipfsHash",
+        "type": "bytes32"
+      }
+    ],
+    "name": "PredictionAdded",
+    "type": "event"
+  },
+  {
+    "inputs": [
+      { "internalType": "address", "name": "marketAddress", "type": "address" },
+      {
+        "components": [
+          {
+            "internalType": "address",
+            "name": "marketAddress",
+            "type": "address"
+          },
+          {
+            "internalType": "address",
+            "name": "publisherAddress",
+            "type": "address"
+          },
+          { "internalType": "bytes32", "name": "ipfsHash", "type": "bytes32" },
+          {
+            "internalType": "bytes32[]",
+            "name": "txHashes",
+            "type": "bytes32[]"
+          },
+          {
+            "internalType": "string[]",
+            "name": "outcomes",
+            "type": "string[]"
+          },
+          {
+            "internalType": "uint16[]",
+            "name": "estimatedProbabilitiesBps",
+            "type": "uint16[]"
+          }
+        ],
+        "internalType": "struct Prediction",
+        "name": "prediction",
+        "type": "tuple"
+      }
+    ],
+    "name": "addPrediction",
+    "outputs": [],
+    "stateMutability": "nonpayable",
+    "type": "function"
+  },
+  {
+    "inputs": [
+      { "internalType": "address", "name": "marketAddress", "type": "address" },
+      { "internalType": "uint256", "name": "index", "type": "uint256" }
+    ],
+    "name": "getPredictionByIndex",
+    "outputs": [
+      {
+        "components": [
+          {
+            "internalType": "address",
+            "name": "marketAddress",
+            "type": "address"
+          },
+          {
+            "internalType": "address",
+            "name": "publisherAddress",
+            "type": "address"
+          },
+          { "internalType": "bytes32", "name": "ipfsHash", "type": "bytes32" },
+          {
+            "internalType": "bytes32[]",
+            "name": "txHashes",
+            "type": "bytes32[]"
+          },
+          {
+            "internalType": "string[]",
+            "name": "outcomes",
+            "type": "string[]"
+          },
+          {
+            "internalType": "uint16[]",
+            "name": "estimatedProbabilitiesBps",
+            "type": "uint16[]"
+          }
+        ],
+        "internalType": "struct Prediction",
+        "name": "",
+        "type": "tuple"
+      }
+    ],
+    "stateMutability": "view",
+    "type": "function"
+  },
+  {
+    "inputs": [
+      { "internalType": "address", "name": "marketAddress", "type": "address" }
+    ],
+    "name": "getPredictions",
+    "outputs": [
+      {
+        "components": [
+          {
+            "internalType": "address",
+            "name": "marketAddress",
+            "type": "address"
+          },
+          {
+            "internalType": "address",
+            "name": "publisherAddress",
+            "type": "address"
+          },
+          { "internalType": "bytes32", "name": "ipfsHash", "type": "bytes32" },
+          {
+            "internalType": "bytes32[]",
+            "name": "txHashes",
+            "type": "bytes32[]"
+          },
+          {
+            "internalType": "string[]",
+            "name": "outcomes",
+            "type": "string[]"
+          },
+          {
+            "internalType": "uint16[]",
+            "name": "estimatedProbabilitiesBps",
+            "type": "uint16[]"
+          }
+        ],
+        "internalType": "struct Prediction[]",
+        "name": "",
+        "type": "tuple[]"
+      }
+    ],
+    "stateMutability": "view",
+    "type": "function"
+  },
+  {
+    "inputs": [],
+    "name": "marketPlatformName",
+    "outputs": [{ "internalType": "string", "name": "", "type": "string" }],
+    "stateMutability": "view",
+    "type": "function"
+  }
+]
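The new ABI replaces the old single `estimatedProbabilityBps` field with per-outcome `outcomes`/`estimatedProbabilitiesBps` arrays and adds `marketAddress` to the Prediction struct. A minimal sketch of decoding one tuple returned by `getPredictions`/`getPredictionByIndex` into named fields, following the component order above (the helper and names are illustrative, not the package API):

    from typing import Any, NamedTuple

    class DecodedPrediction(NamedTuple):
        market_address: str
        publisher_address: str
        ipfs_hash: bytes
        tx_hashes: list[bytes]
        outcomes: list[str]
        estimated_probabilities_bps: list[int]

    def decode_prediction(values: tuple[Any, ...]) -> DecodedPrediction:
        # Component order per the ABI: marketAddress, publisherAddress, ipfsHash,
        # txHashes, outcomes, estimatedProbabilitiesBps.
        return DecodedPrediction(*values[:6])

    decoded = decode_prediction(
        ("0xMarket...", "0xPublisher...", b"\x00" * 32, [], ["Yes", "No"], [6200, 3800])
    )
    assert decoded.outcomes == ["Yes", "No"]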
prediction_market_agent_tooling/data_download/langfuse_data_downloader.py

@@ -1,4 +1,6 @@
+import json
 import os
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from datetime import datetime, timedelta
 from pathlib import Path
 from typing import Any
@@ -14,6 +16,7 @@ from prediction_market_agent_tooling.gtypes import DatetimeUTC, OutcomeStr, Outc
 from prediction_market_agent_tooling.loggers import logger
 from prediction_market_agent_tooling.markets.agent_market import AgentMarket
 from prediction_market_agent_tooling.markets.data_models import Resolution
+from prediction_market_agent_tooling.markets.markets import MarketType
 from prediction_market_agent_tooling.markets.omen.omen import OmenAgentMarket
 from prediction_market_agent_tooling.markets.seer.seer import SeerAgentMarket
 from prediction_market_agent_tooling.markets.seer.seer_subgraph_handler import (
@@ -24,7 +27,6 @@ from prediction_market_agent_tooling.tools.httpx_cached_client import HttpxCache
 from prediction_market_agent_tooling.tools.langfuse_client_utils import (
     get_traces_for_agent,
 )
-from prediction_market_agent_tooling.tools.parallelism import par_map
 
 PREDICTION_STATES = [
     "predict_market",
@@ -33,9 +35,11 @@ PREDICTION_STATES = [
 ]
 REPORT_STATES = ["prepare_report"]
 
+TRADE_STATES = ["build_trades"]
+
 MARKET_RESOLUTION_PROVIDERS = {
-
-
+    MarketType.OMEN: lambda market_id: OmenAgentMarket.get_binary_market(market_id),
+    MarketType.SEER: lambda market_id: SeerAgentMarket.from_data_model_with_subgraph(
         model=SeerSubgraphHandler().get_market_by_id(HexBytes(market_id)),
         seer_subgraph=SeerSubgraphHandler(),
         must_have_prices=False,
@@ -58,8 +62,12 @@ class TraceResult(BaseModel):
     prediction_decision: str
     prediction_p_yes: float
     prediction_info_utility: float
+    prediction_confidence: float
     market_resolution: str | None
     resolution_is_valid: bool | None
+    full_market_json: str | None
+    prediction_json: str
+    trades: list[dict[str, Any]] | None
 
 
 def get_langfuse_client() -> Langfuse:
@@ -72,27 +80,39 @@ def get_langfuse_client() -> Langfuse:
     )
 
 
-def
+def create_output_file_path(
     agent_name: str,
     date_from: DatetimeUTC,
     date_to: DatetimeUTC,
-    only_resolved: bool,
     output_folder: str,
-) ->
+) -> str:
+    """Create unique output file path, incrementing version if file exists."""
     Path(output_folder).mkdir(parents=True, exist_ok=True)
-
+
     default_file_name = f"{agent_name}_{date_from.date()}_{date_to.date()}"
     output_file = os.path.join(output_folder, f"{default_file_name}.csv")
 
-
-
-
-
-            output_folder, f"{default_file_name}_v{index}.csv"
-        )
+    index = 0
+    while os.path.exists(output_file):
+        index += 1
+        output_file = os.path.join(output_folder, f"{default_file_name}_v{index}.csv")
 
+    return output_file
+
+
+def download_data_daily(
+    agent_name: str,
+    date_from: DatetimeUTC,
+    date_to: DatetimeUTC,
+    only_resolved: bool,
+    output_file: str,
+    append_mode: bool = False,
+) -> tuple[int, int]:
+    """Download data for a single day/period and return (traces_downloaded, records_saved)."""
     langfuse_client_for_traces = get_langfuse_client()
 
+    logger.info(f"Processing data for {date_from.date()} to {date_to.date()}")
+
     traces = get_traces_for_agent(
         agent_name=agent_name,
         trace_name="process_market",
@@ -103,40 +123,138 @@ def download_data(
         tags=["answered"],
     )
 
+    traces_count = len(traces) if traces else 0
     if not traces:
-
-
-
-
-
-
-
-
-        ]
-
-
-
-
-
+        logger.info(f"No traces found for {date_from.date()}")
+        # If this is the first call and no traces, create empty CSV with header
+        if not append_mode:
+            df_empty = pd.DataFrame(columns=list(TraceResult.model_fields.keys()))
+            df_empty.to_csv(output_file, mode="w", header=True, index=False)
+        return 0, 0
+
+    # Use ThreadPoolExecutor with shared client (thread-safe)
+    results = []
+    with ThreadPoolExecutor(max_workers=3) as executor:
+        # Submit all tasks
+        future_to_trace = {
+            executor.submit(
+                process_trace, trace, only_resolved, langfuse_client_for_traces
+            ): trace
+            for trace in traces
+        }
+
+        # Collect results as they complete
+        for future in as_completed(future_to_trace):
+            try:
+                result = future.result()
+                results.append(result)
+            except Exception as e:
+                trace = future_to_trace[future]
+                logger.exception(f"Error processing trace {trace.id}: {e}")
+                results.append(None)
 
     successful_results = [r for r in results if r is not None]
     if successful_results:
         results_data = [result.model_dump() for result in successful_results]
-        pd.DataFrame(results_data)
-
-
+        df = pd.DataFrame(results_data)
+
+        df.to_csv(
+            output_file,
+            mode="a" if append_mode else "w",
+            header=not append_mode,
+            index=False,
+        )
+        logger.info(f"Saved {len(successful_results)} records for {date_from.date()}")
+    elif not append_mode:
+        df_empty = pd.DataFrame(columns=list(TraceResult.model_fields.keys()))
+        df_empty.to_csv(output_file, mode="w", header=True, index=False)
+
+    return traces_count, len(successful_results)
+
+
+def download_data(
+    agent_name: str,
+    date_from: DatetimeUTC,
+    date_to: DatetimeUTC,
+    only_resolved: bool,
+    output_folder: str,
+) -> None:
+    output_file = create_output_file_path(agent_name, date_from, date_to, output_folder)
+    total_traces = 0
+    total_saved = 0
+    daily_stats = []
+
+    current_date = date_from
+    first_call = True
+
+    while current_date < date_to:
+        next_date = DatetimeUTC.from_datetime(current_date + timedelta(days=1))
+        if next_date > date_to:
+            next_date = date_to
+
+        traces_downloaded, records_saved = download_data_daily(
+            agent_name=agent_name,
+            date_from=current_date,
+            date_to=next_date,
+            only_resolved=only_resolved,
+            output_file=output_file,
+            append_mode=not first_call,
+        )
+
+        daily_stats.append(
+            {
+                "date": current_date.date(),
+                "traces_downloaded": traces_downloaded,
+                "records_saved": records_saved,
+            }
+        )
+
+        total_traces += traces_downloaded
+        total_saved += records_saved
+        first_call = False
+        current_date = next_date
+
+    # Print daily report
+    logger.info("=" * 60)
+    logger.info("DAILY PROCESSING REPORT")
+    logger.info("=" * 60)
+    for stats in daily_stats:
+        total_traces_downloaded = int(stats["traces_downloaded"])  # type: ignore
+        total_records_saved = int(stats["records_saved"])  # type: ignore
+        success_rate = (
+            (total_records_saved / total_traces_downloaded * 100)
+            if total_traces_downloaded > 0
+            else 0
+        )
+        logger.info(
+            f"{stats['date']}: {total_traces_downloaded} traces downloaded, {total_records_saved} successfully processed ({success_rate:.1f}%)"
+        )
+
+    logger.info("=" * 60)
+    logger.info("OVERALL SUMMARY")
+    logger.info("=" * 60)
+    overall_success_rate = (total_saved / total_traces * 100) if total_traces > 0 else 0
+    logger.info(f"Total traces downloaded: {total_traces}")
+    logger.info(f"Total records saved: {total_saved}")
+    logger.info(f"Overall success rate: {overall_success_rate:.1f}%")
+
+    if total_saved == 0:
         logger.warning("No results to save")
+    else:
+        logger.info(f"Output file: {output_file}")
+    logger.info("=" * 60)
 
 
 def process_trace(
     trace: TraceWithDetails,
     only_resolved: bool,
+    langfuse_client: Langfuse,
+    include_market: bool = True,
 ) -> TraceResult | None:
-    langfuse_client = get_langfuse_client()
     try:
+        logger.info(f"Processing trace {trace.id}")
         observations = langfuse_client.fetch_observations(trace_id=trace.id)
-
+        logger.info(f"Observations downloaded for trace {trace.id}")
         market_state, market_type = get_agent_market_state(trace.input)
 
         prepare_report_obs = [
@@ -145,7 +263,9 @@ def process_trace(
         predict_market_obs = [
            obs for obs in observations.data if obs.name in PREDICTION_STATES
         ]
-
+        build_trades_obs = [
+            obs for obs in observations.data if obs.name in TRADE_STATES
+        ]
         if not prepare_report_obs or not predict_market_obs:
             raise ValueError(f"Missing required observations for trace {trace.id}")
 
@@ -161,7 +281,7 @@
             agent_name=trace.metadata["agent_class"],
             trace_id=trace.id,
             market_id=market_state.id,
-            market_type=market_type,
+            market_type=market_type.value,
             market_question=market_state.question,
             market_outcomes=list(market_state.outcomes),
             market_outcome_token_pool=market_state.outcome_token_pool,
@@ -169,13 +289,17 @@
             market_close_time=market_state.close_time,
             analysis=analysis,
             prediction_reasoning=prediction["reasoning"],
-            prediction_decision="
+            prediction_decision="y" if prediction["p_yes"] > 0.5 else "n",
             prediction_p_yes=prediction["p_yes"],
             prediction_info_utility=prediction["info_utility"],
+            prediction_confidence=prediction["confidence"],
+            prediction_json=json.dumps(prediction),
             market_resolution=resolution.outcome if resolution else None,
             resolution_is_valid=not resolution.invalid if resolution else None,
+            full_market_json=market_state.model_dump_json() if include_market else None,
+            trades=build_trades_obs[0].output if build_trades_obs else None,
         )
-
+        logger.info(f"Downloaded trace {trace.id} finished")
         return result
 
     except Exception as e:
@@ -183,7 +307,9 @@
         return None
 
 
-def get_agent_market_state(
+def get_agent_market_state(
+    input_data: dict[str, Any]
+) -> tuple[AgentMarket, MarketType]:
     if not input_data or "args" not in input_data:
         raise ValueError("Invalid input data: missing args")
 
@@ -191,27 +317,38 @@ def get_agent_market_state(input_data: dict[str, Any]) -> tuple[AgentMarket, str
     if len(args) < 2:
         raise ValueError("Invalid args: expected at least 2 elements")
 
-    market_type = args[0]
-
+    market_type = MarketType(args[0])
     if market_type not in MARKET_RESOLUTION_PROVIDERS:
         raise ValueError(f"Unknown market type: {market_type}")
 
     market_data = args[1]  # market object data
-    market_state = AgentMarket.model_construct(**market_data)
 
-
+    # recreate probabilities if not present
+    if "outcome_token_pool" in market_data and "probabilities" not in market_data:
+        market_data["probabilities"] = AgentMarket.build_probability_map(
+            [
+                OutcomeToken(
+                    float(value["value"]) if isinstance(value, dict) else float(value)
+                ).as_outcome_wei
+                for value in market_data["outcome_token_pool"].values()
+            ],
+            list(market_data["outcome_token_pool"].keys()),
+        )
 
+    if market_type == MarketType.OMEN:
+        return OmenAgentMarket.model_validate(market_data), market_type
+    elif market_type == MarketType.SEER:
+        return SeerAgentMarket.model_validate(market_data), market_type
+    else:
+        return AgentMarket.model_validate(market_data), market_type
 
-def get_market_resolution(market_id: str, market_type: str) -> Resolution:
-    market_type_lower = market_type.lower()
 
-
-
+def get_market_resolution(market_id: str, market_type: MarketType) -> Resolution:
+    if market_type not in MARKET_RESOLUTION_PROVIDERS:
+        raise ValueError(f"Unknown market type: {market_type.market_class}")
 
     try:
-        market: AgentMarket | None = MARKET_RESOLUTION_PROVIDERS[
-            market_id
-        )
+        market: AgentMarket | None = MARKET_RESOLUTION_PROVIDERS[market_type](market_id)
 
         if not market or not market.resolution:
             raise ValueError(f"No resolution found for market: {market_id}")
@@ -220,7 +357,7 @@ def get_market_resolution(market_id: str, market_type: str) -> Resolution:
 
     except Exception as e:
         raise ValueError(
-            f"Failed to fetch {market_type} market {market_id} resolution: {e}"
+            f"Failed to fetch {market_type.market_class} market {market_id} resolution: {e}"
        ) from e
 
 
@@ -234,7 +371,7 @@ def parse_date(date_str: str, param_name: str) -> DatetimeUTC:
 
 
 def main(
-    agent_name: str = "
+    agent_name: str = "DeployablePredictionProphet",
     only_resolved: bool = True,
     date_from: str = typer.Option(
         None, help="Start date in ISO format (YYYY-MM-DD or YYYY-MM-DDTHH:MM:SS)"
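The rewritten downloader now walks the requested range one day at a time and streams each day's results into a single CSV: the first chunk writes the header, later chunks append rows only. A minimal standalone sketch of that append pattern (illustrative column names; pandas assumed available):

    import pandas as pd

    def write_chunk(df: pd.DataFrame, output_file: str, append_mode: bool) -> None:
        # First chunk creates the file with a header; subsequent chunks append rows only.
        df.to_csv(output_file, mode="a" if append_mode else "w", header=not append_mode, index=False)

    write_chunk(pd.DataFrame({"trace_id": ["t1"]}), "traces.csv", append_mode=False)
    write_chunk(pd.DataFrame({"trace_id": ["t2"]}), "traces.csv", append_mode=True)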
prediction_market_agent_tooling/deploy/agent.py

@@ -373,7 +373,8 @@ class DeployablePredictionAgent(DeployableAgent):
             binary_answer = self.answer_binary_market(market)
             return (
                 CategoricalProbabilisticAnswer.from_probabilistic_answer(
-                    binary_answer
+                    binary_answer,
+                    market.outcomes,
                 )
                 if binary_answer is not None
                 else None
@@ -385,6 +386,27 @@ class DeployablePredictionAgent(DeployableAgent):
 
         return self.answer_categorical_market(market)
 
+    def verify_answer_outcomes(
+        self, market: AgentMarket, answer: CategoricalProbabilisticAnswer
+    ) -> None:
+        outcomes_from_prob_map = list(answer.probabilities.keys())
+
+        if any(
+            outcome_from_answer not in market.outcomes
+            for outcome_from_answer in outcomes_from_prob_map
+        ):
+            raise ValueError(
+                f"Some of generated outcomes ({outcomes_from_prob_map=}) in probability map doesn't match with market's outcomes ({market.outcomes=})."
+            )
+
+        if any(
+            market_outcome not in outcomes_from_prob_map
+            for market_outcome in market.outcomes
+        ):
+            logger.warning(
+                f"Some of market's outcomes ({market.outcomes=}) isn't included in the probability map ({outcomes_from_prob_map=})."
+            )
+
     def process_market(
         self,
         market_type: MarketType,
@@ -399,6 +421,8 @@ class DeployablePredictionAgent(DeployableAgent):
         answer = self.build_answer(
             market=market, market_type=market_type, verify_market=verify_market
         )
+        if answer is not None:
+            self.verify_answer_outcomes(market=market, answer=answer)
 
         processed_market = (
             ProcessedMarket(answer=answer) if answer is not None else None
@@ -406,7 +430,7 @@ class DeployablePredictionAgent(DeployableAgent):
 
         self.update_langfuse_trace_by_processed_market(market_type, processed_market)
         logger.info(
-            f"Processed market {market.question=} from {market.url=} with {
+            f"Processed market {market.question=} from {market.url=} with {processed_market=}."
         )
         return processed_market
 
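The new `verify_answer_outcomes` check treats the two mismatch directions differently: outcomes in the answer that the market does not know are a hard error, while market outcomes missing from the answer only log a warning. A minimal standalone sketch of the same rule (illustrative names, plain strings instead of the package's types):

    import logging

    logger = logging.getLogger(__name__)

    def verify_answer_outcomes(market_outcomes: list[str], answer_probabilities: dict[str, float]) -> None:
        # Outcomes the market does not have are rejected outright.
        extra = [o for o in answer_probabilities if o not in market_outcomes]
        if extra:
            raise ValueError(f"Answer contains outcomes not present on the market: {extra}")
        # Market outcomes without a probability only produce a warning.
        missing = [o for o in market_outcomes if o not in answer_probabilities]
        if missing:
            logger.warning("Market outcomes missing from the probability map: %s", missing)

    verify_answer_outcomes(["Yes", "No"], {"Yes": 0.6, "No": 0.4})  # passes silently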
prediction_market_agent_tooling/markets/agent_market.py

@@ -21,7 +21,6 @@ from prediction_market_agent_tooling.gtypes import (
     OutcomeWei,
     Probability,
 )
-from prediction_market_agent_tooling.loggers import logger
 from prediction_market_agent_tooling.markets.data_models import (
     USD,
     Bet,
@@ -93,11 +92,6 @@ class AgentMarket(BaseModel):
         outcomes: t.Sequence[OutcomeStr] = check_not_none(info.data.get("outcomes"))
         if set(probs.keys()) != set(outcomes):
             raise ValueError("Keys of `probabilities` must match `outcomes` exactly.")
-        total = float(sum(probs.values()))
-        if not 0.999 <= total <= 1.001:
-            # We simply log a warning because for some use-cases (e.g. existing positions), the
-            # markets might be already closed hence no reliable outcome token prices exist anymore.
-            logger.warning(f"Probabilities for market {info.data=} do not sum to 1.")
         return probs
 
     @field_validator("outcome_token_pool")
prediction_market_agent_tooling/markets/blockchain_utils.py

@@ -1,8 +1,15 @@
+from typing import Sequence
+
 from web3 import Web3
 from web3.constants import HASH_ZERO
 
 from prediction_market_agent_tooling.config import APIKeys
-from prediction_market_agent_tooling.gtypes import
+from prediction_market_agent_tooling.gtypes import (
+    ChecksumAddress,
+    HexBytes,
+    HexStr,
+    OutcomeStr,
+)
 from prediction_market_agent_tooling.loggers import logger
 from prediction_market_agent_tooling.markets.agent_market import ProcessedTradedMarket
 from prediction_market_agent_tooling.markets.omen.data_models import (
@@ -10,18 +17,17 @@ from prediction_market_agent_tooling.markets.omen.data_models import (
     IPFSAgentResult,
 )
 from prediction_market_agent_tooling.markets.omen.omen_contracts import (
-
+    _AgentResultMappingContract,
 )
 from prediction_market_agent_tooling.tools.ipfs.ipfs_handler import IPFSHandler
 from prediction_market_agent_tooling.tools.utils import BPS_CONSTANT
 from prediction_market_agent_tooling.tools.web3_utils import ipfscidv0_to_byte32
 
-# max uint16 for easy prediction identification (if market does not have YES outcome)
-UINT16_MAX = 2**16 - 1  # = 65535
-
 
 def store_trades(
-
+    contract: _AgentResultMappingContract,
+    market_id: ChecksumAddress,
+    outcomes: Sequence[OutcomeStr],
     traded_market: ProcessedTradedMarket | None,
     keys: APIKeys,
     agent_name: str,
@@ -31,10 +37,14 @@ def store_trades(
         logger.warning(f"No prediction for market {market_id}, not storing anything.")
         return None
 
-
-    if not
-        logger.info("Skipping this since no
+    probabilities = traded_market.answer.probabilities
+    if not probabilities:
+        logger.info("Skipping this since no probabilities available.")
         return None
+
+    if all(outcome not in probabilities for outcome in outcomes):
+        raise ValueError("No of the market's outcomes is in the probabilities.")
+
     reasoning = traded_market.answer.reasoning if traded_market.answer.reasoning else ""
 
     ipfs_hash_decoded = HexBytes(HASH_ZERO)
@@ -50,17 +60,23 @@ def store_trades(
         HexBytes(HexStr(i.id)) for i in traded_market.trades if i.id is not None
     ]
 
-
+    # Dune dashboard expects the probs to be in the same order as on the market.
+    probabilities_converted = [
+        (outcome, int(probabilities.get(outcome, 0) * BPS_CONSTANT))
+        for outcome in outcomes
+    ]
 
     prediction = ContractPrediction(
+        market=market_id,
         publisher=keys.bet_from_address,
        ipfs_hash=ipfs_hash_decoded,
         tx_hashes=tx_hashes,
-
+        outcomes=[x[0] for x in probabilities_converted],
+        estimated_probabilities_bps=[x[1] for x in probabilities_converted],
     )
-    tx_receipt =
+    tx_receipt = contract.add_prediction(
         api_keys=keys,
-        market_address=
+        market_address=market_id,
         prediction=prediction,
         web3=web3,
     )
prediction_market_agent_tooling/markets/data_models.py

@@ -15,6 +15,10 @@ from prediction_market_agent_tooling.gtypes import (
     Probability,
 )
 from prediction_market_agent_tooling.logprobs_parser import FieldLogprobs
+from prediction_market_agent_tooling.markets.omen.omen_constants import (
+    OMEN_FALSE_OUTCOME,
+    OMEN_TRUE_OUTCOME,
+)
 from prediction_market_agent_tooling.tools.utils import DatetimeUTC, check_not_none
 
 
@@ -142,13 +146,20 @@ class CategoricalProbabilisticAnswer(BaseModel):
     @staticmethod
     def from_probabilistic_answer(
         answer: ProbabilisticAnswer,
+        market_outcomes: Sequence[OutcomeStr] | None = None,
     ) -> "CategoricalProbabilisticAnswer":
         return CategoricalProbabilisticAnswer(
             probabilities={
-
-
-
-
+                (
+                    OMEN_TRUE_OUTCOME
+                    if market_outcomes and OMEN_TRUE_OUTCOME in market_outcomes
+                    else OutcomeStr(YES_OUTCOME_LOWERCASE_IDENTIFIER)
+                ): answer.p_yes,
+                (
+                    OMEN_FALSE_OUTCOME
+                    if market_outcomes and OMEN_FALSE_OUTCOME in market_outcomes
+                    else OutcomeStr(NO_OUTCOME_LOWERCASE_IDENTIFIER)
+                ): Probability(1 - answer.p_yes),
             },
             confidence=answer.confidence,
             reasoning=answer.reasoning,
prediction_market_agent_tooling/markets/metaculus/metaculus.py

@@ -5,7 +5,6 @@ from pydantic_core.core_schema import FieldValidationInfo
 
 from prediction_market_agent_tooling.config import APIKeys
 from prediction_market_agent_tooling.gtypes import OutcomeStr, Probability
-from prediction_market_agent_tooling.loggers import logger
 from prediction_market_agent_tooling.markets.agent_market import (
     AgentMarket,
     FilterBy,
@@ -43,13 +42,7 @@ class MetaculusAgentMarket(AgentMarket):
         probs: dict[OutcomeStr, Probability],
         info: FieldValidationInfo,
     ) -> dict[OutcomeStr, Probability]:
-
-        # We don't check for outcomes match because Metaculus has no filled outcomes.
-        total = float(sum(probs.values()))
-        if not 0.999 <= total <= 1.001:
-            # We simply log a warning because for some use-cases (e.g. existing positions), the
-            # markets might be already closed hence no reliable outcome token prices exist anymore.
-            logger.warning(f"Probabilities for market {info.data=} do not sum to 1.")
+        # We don't check for outcomes match here because Metaculus has no filled outcomes.
         return probs
 
     @staticmethod
prediction_market_agent_tooling/markets/omen/data_models.py

@@ -23,6 +23,10 @@ from prediction_market_agent_tooling.markets.data_models import (
     Resolution,
     ResolvedBet,
 )
+from prediction_market_agent_tooling.markets.omen.omen_constants import (
+    OMEN_FALSE_OUTCOME,
+    OMEN_TRUE_OUTCOME,
+)
 from prediction_market_agent_tooling.tools.contract import (
     ContractERC20OnGnosisChain,
     init_collateral_token_contract,
@@ -37,8 +41,6 @@ from prediction_market_agent_tooling.tools.utils import (
     utcnow,
 )
 
-OMEN_TRUE_OUTCOME = OutcomeStr("Yes")
-OMEN_FALSE_OUTCOME = OutcomeStr("No")
 OMEN_BINARY_MARKET_OUTCOMES: t.Sequence[OutcomeStr] = [
     OMEN_TRUE_OUTCOME,
     OMEN_FALSE_OUTCOME,
@@ -826,18 +828,44 @@ class CreatedMarket(BaseModel):
 
 class ContractPrediction(BaseModel):
     model_config = ConfigDict(populate_by_name=True)
+
+    market: str | None = Field(
+        None,
+        alias="marketAddress",
+        description="Market's address. Will be None on older records.",
+    )
     publisher: str = Field(..., alias="publisherAddress")
     ipfs_hash: HexBytes = Field(..., alias="ipfsHash")
     tx_hashes: list[HexBytes] = Field(..., alias="txHashes")
-
+    outcomes: list[OutcomeStr] = Field(...)
+    estimated_probabilities_bps: list[int] = Field(
+        ..., alias="estimatedProbabilitiesBps"
+    )
 
-    @
-
-
+    @model_validator(mode="before")
+    @classmethod
+    def handle_legacy_estimated_probability_bps(
+        cls, values: dict[str, t.Any]
+    ) -> dict[str, t.Any]:
+        # If 'estimatedProbabilityBps' is present and 'outcomes'/'estimatedProbabilitiesBps' are not,
+        # convert to the new format using "Yes" and "No" outcomes.
+        # This allows for backward compatibility with old contract events.
+        if (
+            "estimatedProbabilityBps" in values
+            and "outcomes" not in values
+            and "estimatedProbabilitiesBps" not in values
+        ):
+            prob_bps = values["estimatedProbabilityBps"]
+            values["outcomes"] = [
+                OMEN_TRUE_OUTCOME,
+                OMEN_FALSE_OUTCOME,
+            ]
+            values["estimatedProbabilitiesBps"] = [prob_bps, BPS_CONSTANT - prob_bps]
+        return values
 
-
-
-        return self.
+    def estimated_probability_of_outcome(self, outcome: OutcomeStr) -> Probability:
+        index = self.outcomes.index(outcome)
+        return Probability(self.estimated_probabilities_bps[index] / BPS_CONSTANT)
 
     @computed_field  # type: ignore[prop-decorator] # Mypy issue: https://github.com/python/mypy/issues/14461
     @property
@@ -847,10 +875,12 @@ class ContractPrediction(BaseModel):
     @staticmethod
     def from_tuple(values: tuple[t.Any, ...]) -> "ContractPrediction":
         return ContractPrediction(
-
-
-
-
+            market=values[0],
+            publisher=values[1],
+            ipfs_hash=values[2],
+            tx_hashes=values[3],
+            outcomes=values[4],
+            estimated_probabilities_bps=values[5],
         )
 
 
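The `handle_legacy_estimated_probability_bps` validator keeps old on-chain records readable: a legacy record carrying a single `estimatedProbabilityBps` is expanded into the new two-outcome shape. A minimal standalone sketch of that rule (illustrative helper, not the package's validator; BPS_CONSTANT = 10,000 is assumed here):

    BPS_CONSTANT = 10_000  # assumption made for this sketch

    def upgrade_legacy_record(values: dict) -> dict:
        # Only rewrite records that still use the single-probability legacy field.
        if (
            "estimatedProbabilityBps" in values
            and "outcomes" not in values
            and "estimatedProbabilitiesBps" not in values
        ):
            prob_bps = values["estimatedProbabilityBps"]
            values["outcomes"] = ["Yes", "No"]
            values["estimatedProbabilitiesBps"] = [prob_bps, BPS_CONSTANT - prob_bps]
        return values

    assert upgrade_legacy_record({"estimatedProbabilityBps": 6500})["estimatedProbabilitiesBps"] == [6500, 3500]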
prediction_market_agent_tooling/markets/omen/omen.py

@@ -50,6 +50,7 @@ from prediction_market_agent_tooling.markets.omen.omen_contracts import (
     OMEN_DEFAULT_MARKET_FEE_PERC,
     REALITY_DEFAULT_FINALIZATION_TIMEOUT,
     Arbitrator,
+    OmenAgentResultMappingContract,
     OmenConditionalTokenContract,
     OmenFixedProductMarketMakerContract,
     OmenFixedProductMarketMakerFactoryContract,
@@ -436,10 +437,13 @@ class OmenAgentMarket(AgentMarket):
         web3: Web3 | None = None,
     ) -> None:
         return store_trades(
-
+            contract=OmenAgentResultMappingContract(),
+            market_id=Web3.to_checksum_address(self.id),
+            outcomes=self.outcomes,
             traded_market=traded_market,
             keys=keys,
             agent_name=agent_name,
+            web3=web3,
         )
 
     @staticmethod
prediction_market_agent_tooling/markets/omen/omen_constants.py

@@ -1,5 +1,10 @@
 from web3 import Web3
 
+from prediction_market_agent_tooling.gtypes import OutcomeStr
+
+OMEN_TRUE_OUTCOME = OutcomeStr("Yes")
+OMEN_FALSE_OUTCOME = OutcomeStr("No")
+
 WRAPPED_XDAI_CONTRACT_ADDRESS = Web3.to_checksum_address(
     "0xe91d153e0b41518a2ce8dd3d7944fa863463a97d"
 )
prediction_market_agent_tooling/markets/omen/omen_contracts.py

@@ -822,20 +822,16 @@ class OmenRealitioContract(ContractOnGnosisChain):
         return is_pending_arbitration
 
 
-class
+class _AgentResultMappingContract(ContractOnGnosisChain):
     # Contract ABI taken from built https://github.com/gnosis/labs-contracts.
 
     abi: ABI = abi_field_validator(
         os.path.join(
             os.path.dirname(os.path.realpath(__file__)),
-            "../../abis/
+            "../../abis/agentresultmapping.abi.json",
         )
     )
 
-    address: ChecksumAddress = Web3.to_checksum_address(
-        "0x260E1077dEA98e738324A6cEfB0EE9A272eD471a"
-    )
-
     def get_predictions(
         self,
         market_address: ChecksumAddress,
@@ -861,6 +857,18 @@ class OmenAgentResultMappingContract(ContractOnGnosisChain):
         )
 
 
+class OmenAgentResultMappingContract(_AgentResultMappingContract):
+    address: ChecksumAddress = Web3.to_checksum_address(
+        "0x99c43743A2dbd406160CC43cf08113b17178789c"
+    )
+
+
+class SeerAgentResultMappingContract(_AgentResultMappingContract):
+    address: ChecksumAddress = Web3.to_checksum_address(
+        "0x1aafdfBD38EE92A4a74A44A1614E00894205074e"
+    )
+
+
 class OmenThumbnailMapping(ContractOnGnosisChain):
     # Contract ABI taken from built https://github.com/gnosis/labs-contracts.
     abi: ABI = abi_field_validator(
prediction_market_agent_tooling/markets/seer/seer.py

@@ -27,9 +27,13 @@ from prediction_market_agent_tooling.markets.agent_market import (
     ProcessedTradedMarket,
     SortBy,
 )
+from prediction_market_agent_tooling.markets.blockchain_utils import store_trades
 from prediction_market_agent_tooling.markets.data_models import ExistingPosition
 from prediction_market_agent_tooling.markets.market_fees import MarketFees
 from prediction_market_agent_tooling.markets.omen.omen import OmenAgentMarket
+from prediction_market_agent_tooling.markets.omen.omen_contracts import (
+    SeerAgentResultMappingContract,
+)
 from prediction_market_agent_tooling.markets.seer.data_models import (
     RedeemParams,
     SeerMarket,
@@ -113,7 +117,15 @@ class SeerAgentMarket(AgentMarket):
         agent_name: str,
         web3: Web3 | None = None,
     ) -> None:
-
+        return store_trades(
+            contract=SeerAgentResultMappingContract(),
+            market_id=Web3.to_checksum_address(self.id),
+            outcomes=self.outcomes,
+            traded_market=traded_market,
+            keys=keys,
+            agent_name=agent_name,
+            web3=web3,
+        )
 
     def get_token_in_usd(self, x: CollateralToken) -> USD:
         return get_token_in_usd(x, self.collateral_token_contract_address_checksummed)
{prediction_market_agent_tooling-0.65.12.dist-info → prediction_market_agent_tooling-0.66.0.dist-info}/RECORD

@@ -1,10 +1,10 @@
+prediction_market_agent_tooling/abis/agentresultmapping.abi.json,sha256=YWyJBpZFlpLoSDmYZYNavTETAh3Hj7SRpI_iNEebMOg,4753
 prediction_market_agent_tooling/abis/debuggingcontract.abi.json,sha256=KdkCWmif_WO421RFKhc03kGJiccFqsxsZNrgCd8EH_4,571
 prediction_market_agent_tooling/abis/depositablewrapper_erc20.abi.json,sha256=m0Wk3uQyLM8apWRRvX4d3u1d77bWVuXfV38D-aV48t0,4612
 prediction_market_agent_tooling/abis/erc20.abi.json,sha256=b8t6tPUhjBo5dHFq8ipsK-ih_g29dOTbWI0GBZiEWL8,3685
 prediction_market_agent_tooling/abis/erc4626.abi.json,sha256=qvmonhj3tJU5flR4pJjXCSSwB8tgEiJcx4Sc5ilQ5VY,17801
 prediction_market_agent_tooling/abis/erc721.abi.json,sha256=sslrlTfLAkHA7cAJAaAHmGBSsY8_3K6jNK-2OoPUTPs,5396
 prediction_market_agent_tooling/abis/gvp2_settlement.abi.json,sha256=J-OqsRqeqIo81RaT4btCGmwqBv6s7mYuTI9vWLr6Eu4,1790
-prediction_market_agent_tooling/abis/omen_agentresultmapping.abi.json,sha256=TVWP5J6RO8mb1YpI-RA2iCgeCqGfrIw0OfbHngekTSs,5769
 prediction_market_agent_tooling/abis/omen_dxdao.abi.json,sha256=Z1kD1QfgYfGwsZEI2UFDNEGZ4hMOix3HGICk8xlO4Ds,9578
 prediction_market_agent_tooling/abis/omen_fpmm.abi.json,sha256=CDib_b5PVj4m0JBVCB20cTshiVx8st1Be-7E460qFoY,11406
 prediction_market_agent_tooling/abis/omen_fpmm_conditionaltokens.abi.json,sha256=ux30hTxtZiOZ86FsTEK8Xnm910iyjB1CFnkXrCbhfm8,9841
@@ -25,8 +25,8 @@ prediction_market_agent_tooling/benchmark/benchmark.py,sha256=hpIpjjePDFTLhK841s
 prediction_market_agent_tooling/benchmark/utils.py,sha256=xQd7p9H08-OtN3iC4QT2i9bkUTmrXa6rxGXeg9yMhgU,2986
 prediction_market_agent_tooling/chains.py,sha256=1qQstoqXMwqwM7k-KH7MjMz8Ei-D83KZByvDbCZpAxs,116
 prediction_market_agent_tooling/config.py,sha256=-kJfdDr-m0R-tGZ1KRI-hJJk0mXDt142CAlvwaJ2N2I,11778
-prediction_market_agent_tooling/data_download/langfuse_data_downloader.py,sha256=
-prediction_market_agent_tooling/deploy/agent.py,sha256=
+prediction_market_agent_tooling/data_download/langfuse_data_downloader.py,sha256=VY23h324VKIVkevj1B1O-zL1eEp9AElmcfn6SwYDUSc,14246
+prediction_market_agent_tooling/deploy/agent.py,sha256=HLK5rf38PWgKydc_aIUck-wYzWQUnPdhto3Evoq82Lg,26282
 prediction_market_agent_tooling/deploy/agent_example.py,sha256=yS1fWkHynr9MYGNOM2WsCnRWLPaffY4bOc6bIudrdd4,1377
 prediction_market_agent_tooling/deploy/betting_strategy.py,sha256=YYayGjTKW02d3BUavJ8M3NmFk41oldEM3FHbwppZGRM,17184
 prediction_market_agent_tooling/deploy/constants.py,sha256=Qe9cllgsGMkecfmbhXoFkPxuJyG6ATsrT87RF9SmPWM,249
@@ -40,11 +40,11 @@ prediction_market_agent_tooling/jobs/jobs_models.py,sha256=DoZ9dlvVhpNrnINiR1uy6
 prediction_market_agent_tooling/jobs/omen/omen_jobs.py,sha256=qbTZ9HVvu_iP4dDxuvOZxAp6JsRKejvEW2YDYCnRmd4,5039
 prediction_market_agent_tooling/loggers.py,sha256=o1HyvwtK1DbuC0YWQwJNqzXLLbSC41gNBkEUxiAziEg,5796
 prediction_market_agent_tooling/logprobs_parser.py,sha256=DBlBQtWX8_URXhzTU3YWIPa76Zx3QDHlx1ARqbgJsVI,5008
-prediction_market_agent_tooling/markets/agent_market.py,sha256=
+prediction_market_agent_tooling/markets/agent_market.py,sha256=nMIa6BkoUWdiz12kFDKuKXD_m6PhLzp3to1vgyj63ZQ,18834
 prediction_market_agent_tooling/markets/base_subgraph_handler.py,sha256=7RaYO_4qAmQ6ZGM8oPK2-CkiJfKmV9MxM-rJlduaecU,1971
-prediction_market_agent_tooling/markets/blockchain_utils.py,sha256=
+prediction_market_agent_tooling/markets/blockchain_utils.py,sha256=6REOt70v3vnzmtCbuRcUTdwt6htXy9nAfNkLOH3Bv1U,2987
 prediction_market_agent_tooling/markets/categorize.py,sha256=orLZlPaHgeREU66m1amxfWikeV77idV4sZDPB8NgSD0,1300
-prediction_market_agent_tooling/markets/data_models.py,sha256=
+prediction_market_agent_tooling/markets/data_models.py,sha256=H3G-2I9QFhWrBY_KI-4BO2jMtb_f9yauzHz-zA4NI5Q,8016
 prediction_market_agent_tooling/markets/manifold/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 prediction_market_agent_tooling/markets/manifold/api.py,sha256=tWnjuqvU8pcCuja2B_ynHeds1iiEFc6QWHjeSO_GSxY,7676
 prediction_market_agent_tooling/markets/manifold/data_models.py,sha256=3z1gFbPMEgCDGqeH-IK8wcvqmIHgLdZX8C2M1UQ7iDw,6740
@@ -54,13 +54,13 @@ prediction_market_agent_tooling/markets/market_fees.py,sha256=YeK3ynjYIguB0xf6sO
 prediction_market_agent_tooling/markets/markets.py,sha256=lIEPfPJD1Gz90pTvN2pZi51rpb969STPgQtNFCqHUJg,2667
 prediction_market_agent_tooling/markets/metaculus/api.py,sha256=4TRPGytQQbSdf42DCg2M_JWYPAuNjqZ3eBqaQBLkNks,2736
 prediction_market_agent_tooling/markets/metaculus/data_models.py,sha256=FaBCTPPezXbBwZ9p791CiVgQ4vB696xnMbz9XVXmiVI,3267
-prediction_market_agent_tooling/markets/metaculus/metaculus.py,sha256=
+prediction_market_agent_tooling/markets/metaculus/metaculus.py,sha256=S1Kkf_F3KuqFO938d5bun60wd8lsGvnGqxsW-tgEBgw,5110
 prediction_market_agent_tooling/markets/omen/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 prediction_market_agent_tooling/markets/omen/cow_contracts.py,sha256=sl1L4cK5nAJwZ2wdhLzqh8p7h_IEValNvLwKUlInKxw,957
-prediction_market_agent_tooling/markets/omen/data_models.py,sha256=
-prediction_market_agent_tooling/markets/omen/omen.py,sha256=
-prediction_market_agent_tooling/markets/omen/omen_constants.py,sha256=
-prediction_market_agent_tooling/markets/omen/omen_contracts.py,sha256=
+prediction_market_agent_tooling/markets/omen/data_models.py,sha256=RsBYSbM4deA6Os4kQ3egH3HvwT80tQho6T1yyoATCMs,31103
+prediction_market_agent_tooling/markets/omen/omen.py,sha256=hTHqS_-Z7jocrwuisqymYWLUUn8bK_WSRwaKPgXz1es,50477
+prediction_market_agent_tooling/markets/omen/omen_constants.py,sha256=XtRk4vpxwUYkTndfjlcmghA-NOIneV8zdHFdyI7tHhM,487
+prediction_market_agent_tooling/markets/omen/omen_contracts.py,sha256=27-HRngTqfk_wgvttB3GeVHhy_O2YZcz9izo9OufOI0,29991
 prediction_market_agent_tooling/markets/omen/omen_resolving.py,sha256=D-ubf_LumHs_c5rBAAntQ8wGKprtO2V1JZeedmChNIE,11035
 prediction_market_agent_tooling/markets/omen/omen_subgraph_handler.py,sha256=h-YFRLY5rlhM9RqqceyfbHlno3elltN8Nr_Mnu1kJ90,40006
 prediction_market_agent_tooling/markets/polymarket/api.py,sha256=UZ4_TG8ceb9Y-qgsOKs8Qiv8zDt957QkT8IX2c83yqo,4800
@@ -71,7 +71,7 @@ prediction_market_agent_tooling/markets/polymarket/utils.py,sha256=8kTeVjXPcXC6D
 prediction_market_agent_tooling/markets/seer/data_models.py,sha256=osM9WaLsxQf-pfVGq0O-IkM93ehP9a7fVUf-hi2VlMs,5523
 prediction_market_agent_tooling/markets/seer/exceptions.py,sha256=cEObdjluivD94tgOLzmimR7wgQEOt6SRakrYdhsRQtk,112
 prediction_market_agent_tooling/markets/seer/price_manager.py,sha256=MClY2NGwOV70nZYIcmzXFy6Ogd8NBIq7telQcQ3VcU4,6243
-prediction_market_agent_tooling/markets/seer/seer.py,sha256=
+prediction_market_agent_tooling/markets/seer/seer.py,sha256=vXSJ-iqkO44q4HVsKbu_6gD8lpx7t70Rbx-Or6xOao8,24354
 prediction_market_agent_tooling/markets/seer/seer_contracts.py,sha256=uMzpHpI6_tgfhWxPzupLdUJlZ1P2wr0rRiYjAGClKgU,4984
 prediction_market_agent_tooling/markets/seer/seer_subgraph_handler.py,sha256=pJxch9_u0EdiIatQP1-UFClt8UEfMZAXBlk5wDO_ovk,9940
 prediction_market_agent_tooling/markets/seer/subgraph_data_models.py,sha256=0izxS8Mtzonfdl9UqvFVXrdj0hVzieroekXhogfZKCw,1817
@@ -124,8 +124,8 @@ prediction_market_agent_tooling/tools/tokens/usd.py,sha256=yuW8iPPtcpP4eLH2nORMD
 prediction_market_agent_tooling/tools/transaction_cache.py,sha256=K5YKNL2_tR10Iw2TD9fuP-CTGpBbZtNdgbd0B_R7pjg,1814
 prediction_market_agent_tooling/tools/utils.py,sha256=RlWSlzS2LavMIWrpwn1fevbzgPZruD4VcXTa-XxjWnE,7343
 prediction_market_agent_tooling/tools/web3_utils.py,sha256=0r26snqCXGdLKCWA8jpe7DV8x2NPYWZwOy4oyKyDCYk,12615
-prediction_market_agent_tooling-0.
-prediction_market_agent_tooling-0.
-prediction_market_agent_tooling-0.
-prediction_market_agent_tooling-0.
-prediction_market_agent_tooling-0.
+prediction_market_agent_tooling-0.66.0.dist-info/LICENSE,sha256=6or154nLLU6bELzjh0mCreFjt0m2v72zLi3yHE0QbeE,7650
+prediction_market_agent_tooling-0.66.0.dist-info/METADATA,sha256=RwJLAfhZDRncghDhfMIkeKlsjXiwNtcZ-7TgeO6ZlKo,8734
+prediction_market_agent_tooling-0.66.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+prediction_market_agent_tooling-0.66.0.dist-info/entry_points.txt,sha256=m8PukHbeH5g0IAAmOf_1Ahm-sGAMdhSSRQmwtpmi2s8,81
+prediction_market_agent_tooling-0.66.0.dist-info/RECORD,,
prediction_market_agent_tooling/abis/omen_agentresultmapping.abi.json

@@ -1,171 +0,0 @@
-[
-  {
-    "type": "constructor",
-    "inputs": [],
-    "stateMutability": "nonpayable"
-  },
-  {
-    "type": "function",
-    "name": "addPrediction",
-    "inputs": [
-      {
-        "name": "marketAddress",
-        "type": "address",
-        "internalType": "address"
-      },
-      {
-        "name": "prediction",
-        "type": "tuple",
-        "internalType": "struct Prediction",
-        "components": [
-          {
-            "name": "publisherAddress",
-            "type": "address",
-            "internalType": "address"
-          },
-          {
-            "name": "ipfsHash",
-            "type": "bytes32",
-            "internalType": "bytes32"
-          },
-          {
-            "name": "txHashes",
-            "type": "bytes32[]",
-            "internalType": "bytes32[]"
-          },
-          {
-            "name": "estimatedProbabilityBps",
-            "type": "uint16",
-            "internalType": "uint16"
-          }
-        ]
-      }
-    ],
-    "outputs": [],
-    "stateMutability": "nonpayable"
-  },
-  {
-    "type": "function",
-    "name": "getPredictionByIndex",
-    "inputs": [
-      {
-        "name": "marketAddress",
-        "type": "address",
-        "internalType": "address"
-      },
-      {
-        "name": "index",
-        "type": "uint256",
-        "internalType": "uint256"
-      }
-    ],
-    "outputs": [
-      {
-        "name": "",
-        "type": "tuple",
-        "internalType": "struct Prediction",
-        "components": [
-          {
-            "name": "publisherAddress",
-            "type": "address",
-            "internalType": "address"
-          },
-          {
-            "name": "ipfsHash",
-            "type": "bytes32",
-            "internalType": "bytes32"
-          },
-          {
-            "name": "txHashes",
-            "type": "bytes32[]",
-            "internalType": "bytes32[]"
-          },
-          {
-            "name": "estimatedProbabilityBps",
-            "type": "uint16",
-            "internalType": "uint16"
-          }
-        ]
-      }
-    ],
-    "stateMutability": "view"
-  },
-  {
-    "type": "function",
-    "name": "getPredictions",
-    "inputs": [
-      {
-        "name": "marketAddress",
-        "type": "address",
-        "internalType": "address"
-      }
-    ],
-    "outputs": [
-      {
-        "name": "",
-        "type": "tuple[]",
-        "internalType": "struct Prediction[]",
-        "components": [
-          {
-            "name": "publisherAddress",
-            "type": "address",
-            "internalType": "address"
-          },
-          {
-            "name": "ipfsHash",
-            "type": "bytes32",
-            "internalType": "bytes32"
-          },
-          {
-            "name": "txHashes",
-            "type": "bytes32[]",
-            "internalType": "bytes32[]"
-          },
-          {
-            "name": "estimatedProbabilityBps",
-            "type": "uint16",
-            "internalType": "uint16"
-          }
-        ]
-      }
-    ],
-    "stateMutability": "view"
-  },
-  {
-    "type": "event",
-    "name": "PredictionAdded",
-    "inputs": [
-      {
-        "name": "marketAddress",
-        "type": "address",
-        "indexed": true,
-        "internalType": "address"
-      },
-      {
-        "name": "estimatedProbabilityBps",
-        "type": "uint16",
-        "indexed": false,
-        "internalType": "uint16"
-      },
-      {
-        "name": "publisherAddress",
-        "type": "address",
-        "indexed": true,
-        "internalType": "address"
-      },
-      {
-        "name": "txHashes",
-        "type": "bytes32[]",
-        "indexed": false,
-        "internalType": "bytes32[]"
-      },
-      {
-        "name": "ipfsHash",
-        "type": "bytes32",
-        "indexed": false,
-        "internalType": "bytes32"
-      }
-    ],
-    "anonymous": false
-  }
-]
File without changes
File without changes