polymarket-cli 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- polymarket_cli/__init__.py +2 -0
- polymarket_cli/__main__.py +4 -0
- polymarket_cli/api.py +652 -0
- polymarket_cli/cli.py +482 -0
- polymarket_cli/formatting.py +157 -0
- polymarket_cli-0.2.0.dist-info/METADATA +96 -0
- polymarket_cli-0.2.0.dist-info/RECORD +10 -0
- polymarket_cli-0.2.0.dist-info/WHEEL +4 -0
- polymarket_cli-0.2.0.dist-info/entry_points.txt +3 -0
- polymarket_cli-0.2.0.dist-info/licenses/LICENSE +21 -0
polymarket_cli/api.py
ADDED
|
@@ -0,0 +1,652 @@
|
|
|
1
|
+
from __future__ import annotations

import json
import urllib.parse
import urllib.request
from dataclasses import dataclass
from datetime import datetime
from typing import Any, Iterable

from .formatting import coerce_float, parse_datetime

# Headers sent with every outgoing request; callers can override via HttpClient(headers=...).
DEFAULT_HEADERS = {
    "User-Agent": "Mozilla/5.0 (compatible; polymarket-cli/0.1)",
    "Accept": "application/json",
}

# Maps user-facing sort-field spellings (camelCase and snake_case both
# accepted) to the canonical snake_case names used internally.
GAMMA_ORDER_MAP = {
    "volume24hr": "volume_24hr",
    "volume_24hr": "volume_24hr",
    "volume": "volume",
    "liquidity": "liquidity",
    "startDate": "start_date",
    "start_date": "start_date",
    "endDate": "end_date",
    "end_date": "end_date",
    "competitive": "competitive",
    "closedTime": "closed_time",
    "closed_time": "closed_time",
}

# Intervals accepted by the CLOB prices-history endpoint.
HISTORY_INTERVALS = {"max", "all", "1m", "1h", "6h", "1d", "1w"}
# For each canonical sort field, the market-dict keys that can supply the
# rank value, in priority order (first is primary, the rest are fallbacks).
RANK_FIELD_ALIASES = {
    "liquidity": ("liquidityNum", "liquidity"),
    "volume_24hr": ("volume24hrClob", "volume24hr"),
    "volume": ("volumeClob", "volume"),
    "start_date": ("startDate", "startDateIso"),
    "end_date": ("endDate", "endDateIso"),
    "competitive": ("competitive",),
    "closed_time": ("closedTime",),
}
# Maps canonical sort fields back to the camelCase names shown to callers
# in the `_ranking` annotations.
DISPLAY_ORDER_MAP = {
    "volume_24hr": "volume24hr",
    "start_date": "startDate",
    "end_date": "endDate",
    "closed_time": "closedTime",
}
|
+
|
|
49
|
+
class ApiError(RuntimeError):
    """Raised when a request to any Polymarket API endpoint fails."""

    pass
|
51
|
+
|
|
52
|
+
|
|
53
|
+
@dataclass
class HttpClient:
    """Minimal stdlib-only HTTP client that fetches JSON via GET."""

    # Socket timeout in seconds for each request.
    timeout: float = 20.0
    # Extra headers; merged over DEFAULT_HEADERS in __post_init__ (caller wins).
    headers: dict[str, str] | None = None

    def __post_init__(self) -> None:
        # Merge caller-supplied headers over the defaults so every request
        # always carries a User-Agent and Accept header.
        merged = dict(DEFAULT_HEADERS)
        if self.headers:
            merged.update(self.headers)
        self.headers = merged

    def _iter_param_pairs(self, params: dict[str, Any]) -> Iterable[tuple[str, str]]:
        """Yield (key, value) query-string pairs.

        None values (and None items inside lists/tuples) are dropped;
        booleans are rendered lowercase ("true"/"false"); list/tuple values
        repeat the key once per item.
        """
        for key, value in params.items():
            if value is None:
                continue
            if isinstance(value, (list, tuple)):
                for item in value:
                    if item is None:
                        continue
                    if isinstance(item, bool):
                        # bool check must precede str(): True -> "true"
                        item = str(item).lower()
                    yield key, str(item)
                continue
            if isinstance(value, bool):
                value = str(value).lower()
            yield key, str(value)

    def get_json(self, url: str, params: dict[str, Any] | None = None) -> Any:
        """GET *url* (optionally with query *params*) and decode the JSON body.

        Raises:
            ApiError: wrapping any underlying network/HTTP/decoding failure.
        """
        if params:
            query = urllib.parse.urlencode(list(self._iter_param_pairs(params)), doseq=True)
            # Append with "&" if the URL already carries a query string.
            sep = "&" if "?" in url else "?"
            url = f"{url}{sep}{query}"
        req = urllib.request.Request(url, headers=self.headers)
        try:
            with urllib.request.urlopen(req, timeout=self.timeout) as resp:
                return json.load(resp)
        except Exception as exc:  # pragma: no cover - exercised through tests with stubs
            raise ApiError(f"GET {url} failed: {exc}") from exc
|
91
|
+
|
|
92
|
+
|
|
93
|
+
class PolymarketClient:
    """Read-only client for Polymarket's public Gamma, CLOB and Data APIs."""

    def __init__(self, http: HttpClient | None = None) -> None:
        # *http* allows injecting a stubbed HttpClient for testing.
        self.http = http or HttpClient()
        # Gamma: market/event metadata and search.
        self.gamma_base = "https://gamma-api.polymarket.com"
        # CLOB: order books, prices, price history.
        self.clob_base = "https://clob.polymarket.com"
        # Data API: trade history.
        self.data_base = "https://data-api.polymarket.com"
|
99
|
+
|
|
100
|
+
    def list_markets(
        self,
        *,
        limit: int = 10,
        offset: int = 0,
        active: bool | None = True,
        closed: bool | None = False,
        archived: bool | None = False,
        search: str | None = None,
        slug: str | None = None,
        order: str = "volume24hr",
        ascending: bool = False,
        tag_ids: list[str] | None = None,
        exclude_tag_ids: list[str] | None = None,
        related_tags: bool = False,
        start_after: str | None = None,
        start_before: str | None = None,
        end_after: str | None = None,
        end_before: str | None = None,
        min_liquidity: float | None = None,
        max_liquidity: float | None = None,
        min_volume24hr: float | None = None,
        max_volume24hr: float | None = None,
        hydrate: bool = False,
    ) -> list[dict[str, Any]]:
        """List Gamma markets, then re-filter/re-sort locally.

        Status (*active*/*closed*/*archived*) and tag filters are sent to the
        Gamma API; the date/liquidity/volume windows are applied locally after
        the fetch.  With *hydrate*, sparse entries are re-fetched so ranking
        fields are populated.  Returns market dicts annotated with `_ranking`
        / `_rankingContext` metadata (see _annotate_rankings).

        Raises:
            ValueError: if *order* is not a supported sort field.
            ApiError: if the Gamma request fails.
        """
        gamma_order = self._normalize_market_order(order)
        params: dict[str, Any] = {
            "limit": limit,
            "offset": offset,
            "active": active,
            "closed": closed,
            "archived": archived,
            "order": gamma_order,
            "ascending": ascending,
        }
        if search:
            params["search"] = search
        if slug:
            params["slug"] = slug
        if tag_ids:
            # HttpClient repeats the key once per list item.
            params["tag_id"] = tag_ids
        if exclude_tag_ids:
            params["exclude_tag_id"] = exclude_tag_ids
        if related_tags:
            params["related_tags"] = True

        data = self.http.get_json(f"{self.gamma_base}/markets", params)
        markets = list(data)
        if hydrate:
            markets = self._hydrate_markets(markets, order=gamma_order, include_tokens=True)
        # Status/tag filters were already applied server-side, hence the
        # None arguments here; only date/liquidity/volume windows (and the
        # local re-sort) are applied in this pass.
        filtered = self._filter_markets(
            markets,
            active=None,
            closed=None,
            archived=None,
            tag_ids=None,
            exclude_tag_ids=None,
            order=gamma_order,
            ascending=ascending,
            start_after=start_after,
            start_before=start_before,
            end_after=end_after,
            end_before=end_before,
            min_liquidity=min_liquidity,
            max_liquidity=max_liquidity,
            min_volume24hr=min_volume24hr,
            max_volume24hr=max_volume24hr,
        )
        return self._annotate_rankings(filtered, order=gamma_order)
|
169
|
+
|
|
170
|
+
def get_market(self, *, slug: str | None = None, market_id: str | None = None) -> dict[str, Any]:
|
|
171
|
+
if bool(slug) == bool(market_id):
|
|
172
|
+
raise ValueError("Provide exactly one of slug or market_id")
|
|
173
|
+
if slug:
|
|
174
|
+
return self.get_market_by_slug(slug)
|
|
175
|
+
return dict(self.http.get_json(f"{self.gamma_base}/markets/{market_id}"))
|
|
176
|
+
|
|
177
|
+
def find_market(self, query: str, *, limit: int = 10) -> list[dict[str, Any]]:
|
|
178
|
+
return self.list_markets(limit=limit, active=None, closed=None, archived=None, search=query)
|
|
179
|
+
|
|
180
|
+
    def search_markets(
        self,
        query: str,
        *,
        limit: int = 10,
        offset: int = 0,
        active: bool | None = True,
        closed: bool | None = False,
        archived: bool | None = False,
        order: str = "volume24hr",
        ascending: bool = False,
        tag_ids: list[str] | None = None,
        exclude_tag_ids: list[str] | None = None,
        related_tags: bool = False,
        start_after: str | None = None,
        start_before: str | None = None,
        end_after: str | None = None,
        end_before: str | None = None,
        min_liquidity: float | None = None,
        max_liquidity: float | None = None,
        min_volume24hr: float | None = None,
        max_volume24hr: float | None = None,
        hydrate: bool = False,
    ) -> list[dict[str, Any]]:
        """Search markets by free text.

        When any non-default paging/filter/sort option is set, delegate to
        list_markets (server-side ranked search); if that fails with ApiError,
        fall back to the public-search endpoint, hydrating and filtering the
        candidates locally.  Returns at most *limit* annotated market dicts.
        """
        if not query:
            # Empty query: nothing to search for.
            return []

        gamma_order = self._normalize_market_order(order)
        # True when the caller asked for anything the public-search fallback
        # cannot provide directly (paging, tags, date/liquidity windows,
        # non-default status flags, non-default sort, hydration).
        requires_ranked_search = any(
            value
            for value in (
                offset,
                tag_ids,
                exclude_tag_ids,
                related_tags,
                start_after,
                start_before,
                end_after,
                end_before,
                min_liquidity,
                max_liquidity,
                min_volume24hr,
                max_volume24hr,
                ascending,
                archived is not False,
                active is not True,
                closed is not False,
                gamma_order != "volume_24hr",
                hydrate,
            )
        )
        if requires_ranked_search:
            try:
                return self.list_markets(
                    limit=limit,
                    offset=offset,
                    active=active,
                    closed=closed,
                    archived=archived,
                    search=query,
                    order=order,
                    ascending=ascending,
                    tag_ids=tag_ids,
                    exclude_tag_ids=exclude_tag_ids,
                    related_tags=related_tags,
                    start_after=start_after,
                    start_before=start_before,
                    end_after=end_after,
                    end_before=end_before,
                    min_liquidity=min_liquidity,
                    max_liquidity=max_liquidity,
                    min_volume24hr=min_volume24hr,
                    max_volume24hr=max_volume24hr,
                    hydrate=hydrate,
                )
            except ApiError:
                # Best-effort: fall through to the public-search path below.
                pass

        # Over-fetch candidates when hydration/local filtering will prune.
        candidate_limit = self._search_candidate_limit(limit=limit, offset=offset, hydrate=hydrate or requires_ranked_search)
        markets = self._search_public_markets(query, limit=candidate_limit)
        if not markets:
            return []

        if hydrate or requires_ranked_search:
            markets = self._hydrate_markets(markets, order=gamma_order, include_tokens=True)

        # Unlike list_markets, status filters must be applied locally here
        # because public-search does not filter by status.
        filtered = self._filter_markets(
            markets,
            active=active,
            closed=closed,
            archived=archived,
            tag_ids=tag_ids,
            exclude_tag_ids=exclude_tag_ids,
            order=gamma_order,
            ascending=ascending,
            start_after=start_after,
            start_before=start_before,
            end_after=end_after,
            end_before=end_before,
            min_liquidity=min_liquidity,
            max_liquidity=max_liquidity,
            min_volume24hr=min_volume24hr,
            max_volume24hr=max_volume24hr,
        )
        # Apply paging last, after local filtering/sorting.
        return self._annotate_rankings(filtered, order=gamma_order)[offset : offset + limit]
|
285
|
+
|
|
286
|
+
    def get_market_by_slug(self, slug: str) -> dict[str, Any]:
        """Resolve a market by slug, trying progressively looser lookups.

        Order: direct /markets/slug/{slug} endpoint, then an exact slug
        filter via list_markets, then a free-text search.

        Raises:
            ApiError: if no candidate is found by any strategy.
        """
        quoted_slug = urllib.parse.quote(slug)
        try:
            return dict(self.http.get_json(f"{self.gamma_base}/markets/slug/{quoted_slug}"))
        except ApiError:
            # Endpoint unavailable or slug unknown; try the slower paths.
            pass

        exact = self.list_markets(
            limit=10,
            active=None,
            closed=None,
            archived=None,
            slug=slug,
        )
        match = self._select_market_match(exact, slug)
        if match:
            return match

        # Last resort: treat the slug as a search query.
        fallback = self.search_markets(slug, limit=10)
        match = self._select_market_match(fallback, slug)
        if match:
            return match
        raise ApiError(f"Market not found for slug: {slug}")
|
309
|
+
|
|
310
|
+
    def _search_public_markets(self, query: str, *, limit: int) -> list[dict[str, Any]]:
        """Fetch search candidates from the public-search endpoint.

        Falls back to a plain list_markets text search when public-search
        fails or returns nothing; returns [] if both paths fail.
        """
        public_search = None
        try:
            public_search = self.http.get_json(
                f"{self.gamma_base}/public-search",
                {
                    "q": query,
                    # Only market results are wanted; skip tags/profiles.
                    "limit_per_type": limit,
                    "search_tags": False,
                    "search_profiles": False,
                    "optimized": True,
                },
            )
        except ApiError:
            # public_search stays None; the extractor tolerates that.
            pass

        markets = self._extract_public_search_markets(public_search)
        if markets:
            return markets

        try:
            return self.list_markets(
                limit=limit,
                offset=0,
                active=None,
                closed=None,
                archived=None,
                search=query,
                order="volume24hr",
            )
        except ApiError:
            # Best-effort search: swallow the failure and report no hits.
            return []
|
342
|
+
|
|
343
|
+
def _extract_public_search_markets(self, payload: Any) -> list[dict[str, Any]]:
|
|
344
|
+
events = payload.get("events") if isinstance(payload, dict) else []
|
|
345
|
+
markets: list[dict[str, Any]] = []
|
|
346
|
+
for event in events or []:
|
|
347
|
+
for market in event.get("markets") or []:
|
|
348
|
+
if isinstance(market, dict):
|
|
349
|
+
markets.append(market)
|
|
350
|
+
return markets
|
|
351
|
+
|
|
352
|
+
def _search_candidate_limit(self, *, limit: int, offset: int, hydrate: bool) -> int:
|
|
353
|
+
minimum = offset + limit
|
|
354
|
+
if not hydrate:
|
|
355
|
+
return max(minimum, 1)
|
|
356
|
+
return max(minimum * 5, 25)
|
|
357
|
+
|
|
358
|
+
    def _hydrate_markets(self, markets: list[dict[str, Any]], *, order: str, include_tokens: bool) -> list[dict[str, Any]]:
        """Re-fetch sparse market records so ranking fields are populated.

        Each input dict is copied; a market is refreshed only when
        _market_needs_hydration says its ranking data (for *order*) or its
        token ids are missing.  Failed refreshes keep the original copy.
        """
        hydrated: list[dict[str, Any]] = []
        for market in markets:
            candidate = dict(market)
            if self._market_needs_hydration(candidate, order=order, include_tokens=include_tokens):
                refreshed = self._hydrate_market(candidate)
                if refreshed is not None:
                    # Keep refreshed data, back-filling any ranking fields
                    # the refresh left empty from the original candidate.
                    candidate = self._merge_market_ranking_fields(candidate, refreshed)
            hydrated.append(candidate)
        return hydrated
|
368
|
+
|
|
369
|
+
def _merge_market_ranking_fields(self, candidate: dict[str, Any], refreshed: dict[str, Any]) -> dict[str, Any]:
|
|
370
|
+
merged = dict(refreshed)
|
|
371
|
+
for field_names in RANK_FIELD_ALIASES.values():
|
|
372
|
+
for field_name in field_names:
|
|
373
|
+
if merged.get(field_name) in (None, "") and candidate.get(field_name) not in (None, ""):
|
|
374
|
+
merged[field_name] = candidate[field_name]
|
|
375
|
+
return merged
|
|
376
|
+
|
|
377
|
+
def _hydrate_market(self, market: dict[str, Any]) -> dict[str, Any] | None:
|
|
378
|
+
slug = market.get("slug")
|
|
379
|
+
market_id = market.get("id")
|
|
380
|
+
try:
|
|
381
|
+
if slug:
|
|
382
|
+
return self.get_market_by_slug(str(slug))
|
|
383
|
+
if market_id:
|
|
384
|
+
return self.get_market(market_id=str(market_id))
|
|
385
|
+
except ApiError:
|
|
386
|
+
return None
|
|
387
|
+
return None
|
|
388
|
+
|
|
389
|
+
    def _market_needs_hydration(self, market: dict[str, Any], *, order: str, include_tokens: bool) -> bool:
        """Whether *market* is missing data needed for ranking by *order*.

        True when core identifiers are absent, token ids are absent (and
        requested), or the value backing the requested sort field cannot be
        parsed from either its primary or fallback key.
        """
        if not market.get("id") or not market.get("conditionId"):
            return True
        if include_tokens and not market.get("clobTokenIds"):
            return True
        # NOTE: `or` fallbacks below treat 0/"" as missing and move on to
        # the alias key; a genuine zero value therefore triggers hydration.
        if order == "liquidity" and coerce_float(market.get("liquidityNum") or market.get("liquidity")) is None:
            return True
        if order == "volume_24hr" and coerce_float(market.get("volume24hrClob") or market.get("volume24hr")) is None:
            return True
        if order == "volume" and coerce_float(market.get("volumeClob") or market.get("volume")) is None:
            return True
        if order == "start_date" and parse_datetime(market.get("startDate") or market.get("startDateIso")) is None:
            return True
        if order == "end_date" and parse_datetime(market.get("endDate") or market.get("endDateIso")) is None:
            return True
        return False
|
405
|
+
|
|
406
|
+
def _select_market_match(self, markets: list[dict[str, Any]], slug: str) -> dict[str, Any] | None:
|
|
407
|
+
if not markets:
|
|
408
|
+
return None
|
|
409
|
+
|
|
410
|
+
normalized = slug.strip().lower()
|
|
411
|
+
for market in markets:
|
|
412
|
+
if str(market.get("slug", "")).strip().lower() == normalized:
|
|
413
|
+
return dict(market)
|
|
414
|
+
|
|
415
|
+
for market in markets:
|
|
416
|
+
event_slug = ""
|
|
417
|
+
events = market.get("events")
|
|
418
|
+
if isinstance(events, list) and events:
|
|
419
|
+
first_event = events[0]
|
|
420
|
+
if isinstance(first_event, dict):
|
|
421
|
+
event_slug = str(first_event.get("slug", "")).strip().lower()
|
|
422
|
+
if event_slug == normalized:
|
|
423
|
+
return dict(market)
|
|
424
|
+
|
|
425
|
+
return dict(markets[0])
|
|
426
|
+
|
|
427
|
+
def get_book(self, token_id: str) -> dict[str, Any]:
|
|
428
|
+
return dict(self.http.get_json(f"{self.clob_base}/book", {"token_id": token_id}))
|
|
429
|
+
|
|
430
|
+
def get_midpoint(self, token_id: str) -> dict[str, Any]:
|
|
431
|
+
return dict(self.http.get_json(f"{self.clob_base}/midpoint", {"token_id": token_id}))
|
|
432
|
+
|
|
433
|
+
def get_last_trade_price(self, token_id: str) -> dict[str, Any]:
|
|
434
|
+
return dict(self.http.get_json(f"{self.clob_base}/last-trade-price", {"token_id": token_id}))
|
|
435
|
+
|
|
436
|
+
def get_price_history(
|
|
437
|
+
self,
|
|
438
|
+
token_id: str,
|
|
439
|
+
*,
|
|
440
|
+
interval: str = "1d",
|
|
441
|
+
fidelity: int = 60,
|
|
442
|
+
start_ts: int | None = None,
|
|
443
|
+
end_ts: int | None = None,
|
|
444
|
+
) -> dict[str, Any]:
|
|
445
|
+
if interval not in HISTORY_INTERVALS:
|
|
446
|
+
raise ValueError(f"Unsupported interval: {interval}")
|
|
447
|
+
params: dict[str, Any] = {"market": token_id, "interval": interval, "fidelity": fidelity}
|
|
448
|
+
if start_ts is not None:
|
|
449
|
+
params["startTs"] = start_ts
|
|
450
|
+
if end_ts is not None:
|
|
451
|
+
params["endTs"] = end_ts
|
|
452
|
+
return dict(self.http.get_json(f"{self.clob_base}/prices-history", params))
|
|
453
|
+
|
|
454
|
+
def get_trades(self, *, market: str | None = None, condition_id: str | None = None, limit: int = 20) -> list[dict[str, Any]]:
|
|
455
|
+
params = {"limit": limit}
|
|
456
|
+
market_filter = condition_id or market
|
|
457
|
+
if market_filter:
|
|
458
|
+
params["market"] = market_filter
|
|
459
|
+
data = self.http.get_json(f"{self.data_base}/trades", params)
|
|
460
|
+
return list(data)
|
|
461
|
+
|
|
462
|
+
def _normalize_market_order(self, order: str) -> str:
|
|
463
|
+
try:
|
|
464
|
+
return GAMMA_ORDER_MAP[order]
|
|
465
|
+
except KeyError as exc:
|
|
466
|
+
valid = ", ".join(sorted(GAMMA_ORDER_MAP))
|
|
467
|
+
raise ValueError(f"Unsupported market sort field: {order}. Choose from: {valid}") from exc
|
|
468
|
+
|
|
469
|
+
    def _filter_markets(
        self,
        markets: list[dict[str, Any]],
        *,
        active: bool | None,
        closed: bool | None,
        archived: bool | None,
        tag_ids: list[str] | None,
        exclude_tag_ids: list[str] | None,
        order: str,
        ascending: bool,
        start_after: str | None,
        start_before: str | None,
        end_after: str | None,
        end_before: str | None,
        min_liquidity: float | None,
        max_liquidity: float | None,
        min_volume24hr: float | None,
        max_volume24hr: float | None,
    ) -> list[dict[str, Any]]:
        """Apply local status/tag/date/liquidity/volume filters, then sort.

        A None filter means "don't filter on this dimension".  Date and
        numeric window filters reject markets whose value cannot be parsed.
        Tag filters are only enforced on markets that carry tag metadata.
        """
        # Parse the window bounds once, outside the loop.
        start_after_dt = parse_datetime(start_after)
        start_before_dt = parse_datetime(start_before)
        end_after_dt = parse_datetime(end_after)
        end_before_dt = parse_datetime(end_before)
        filtered = []
        for market in markets:
            # Status flags: compare truthiness against the requested bool.
            if active is not None and bool(market.get("active")) != active:
                continue
            if closed is not None and bool(market.get("closed")) != closed:
                continue
            if archived is not None and bool(market.get("archived")) != archived:
                continue
            market_tag_ids = self._market_tag_ids(market)
            # Only enforce tag filters when the record actually carries tag
            # metadata; otherwise we would wrongly drop sparse records.
            has_tag_metadata = self._market_has_tag_metadata(market)
            if tag_ids and has_tag_metadata and not all(tag_id in market_tag_ids for tag_id in tag_ids):
                continue
            if exclude_tag_ids and has_tag_metadata and any(tag_id in market_tag_ids for tag_id in exclude_tag_ids):
                continue
            start_dt = parse_datetime(market.get("startDate") or market.get("startDateIso"))
            end_dt = parse_datetime(market.get("endDate") or market.get("endDateIso"))
            liquidity = coerce_float(market.get("liquidityNum") or market.get("liquidity"))
            volume24hr = coerce_float(market.get("volume24hrClob") or market.get("volume24hr"))
            # Window filters: an unparseable value fails the filter.
            if start_after_dt and (start_dt is None or start_dt < start_after_dt):
                continue
            if start_before_dt and (start_dt is None or start_dt > start_before_dt):
                continue
            if end_after_dt and (end_dt is None or end_dt < end_after_dt):
                continue
            if end_before_dt and (end_dt is None or end_dt > end_before_dt):
                continue
            if min_liquidity is not None and (liquidity is None or liquidity < min_liquidity):
                continue
            if max_liquidity is not None and (liquidity is None or liquidity > max_liquidity):
                continue
            if min_volume24hr is not None and (volume24hr is None or volume24hr < min_volume24hr):
                continue
            if max_volume24hr is not None and (volume24hr is None or volume24hr > max_volume24hr):
                continue
            filtered.append(market)

        sort_key = self._market_sort_key(order, ascending)
        return sorted(filtered, key=sort_key)
|
531
|
+
|
|
532
|
+
def _market_tag_ids(self, market: dict[str, Any]) -> set[str]:
|
|
533
|
+
values: set[str] = set()
|
|
534
|
+
self._collect_tag_ids(values, market.get("tags"))
|
|
535
|
+
events = market.get("events")
|
|
536
|
+
if isinstance(events, list):
|
|
537
|
+
for event in events:
|
|
538
|
+
if isinstance(event, dict):
|
|
539
|
+
self._collect_tag_ids(values, event.get("tags"))
|
|
540
|
+
return values
|
|
541
|
+
|
|
542
|
+
def _market_has_tag_metadata(self, market: dict[str, Any]) -> bool:
|
|
543
|
+
if market.get("tags") is not None:
|
|
544
|
+
return True
|
|
545
|
+
events = market.get("events")
|
|
546
|
+
if isinstance(events, list):
|
|
547
|
+
for event in events:
|
|
548
|
+
if isinstance(event, dict) and event.get("tags") is not None:
|
|
549
|
+
return True
|
|
550
|
+
return False
|
|
551
|
+
|
|
552
|
+
def _collect_tag_ids(self, values: set[str], payload: Any) -> None:
|
|
553
|
+
if isinstance(payload, dict):
|
|
554
|
+
for key in ("id", "tagId"):
|
|
555
|
+
value = payload.get(key)
|
|
556
|
+
if value not in (None, ""):
|
|
557
|
+
values.add(str(value))
|
|
558
|
+
return
|
|
559
|
+
if isinstance(payload, list):
|
|
560
|
+
for item in payload:
|
|
561
|
+
self._collect_tag_ids(values, item)
|
|
562
|
+
return
|
|
563
|
+
if payload not in (None, ""):
|
|
564
|
+
values.add(str(payload))
|
|
565
|
+
|
|
566
|
+
    def _market_sort_key(self, order: str, ascending: bool):
        """Build a sort key for sorted(): rank by *order*, Nones always last.

        Descending order is implemented by negating the numeric value so a
        single ascending sorted() call works; the slug/id/question tail makes
        ties deterministic.
        """
        def key(market: dict[str, Any]) -> tuple[int, float, str, str, str]:
            value = self._market_order_value(market, order)
            if isinstance(value, datetime):
                sort_value = value.timestamp()
            elif value is None:
                sort_value = 0.0
            else:
                sort_value = float(value)
            # Negate only real values; None placeholders are already pushed
            # to the end by the leading 0/1 element.
            if value is not None and not ascending:
                sort_value *= -1
            return (
                0 if value is not None else 1,
                sort_value,
                str(market.get("slug") or ""),
                str(market.get("id") or ""),
                str(market.get("question") or ""),
            )

        return key
|
586
|
+
|
|
587
|
+
    def _market_order_value(self, market: dict[str, Any], order: str) -> Any:
        """Extract the value backing sort field *order* from a market dict.

        Returns a float, a datetime, or None when the value is missing or
        unparseable.  Each field reads its primary key, falling back to its
        alias (see RANK_FIELD_ALIASES); the `or` fallback treats 0/"" in the
        primary key as missing.
        """
        if order == "liquidity":
            return coerce_float(market.get("liquidityNum") or market.get("liquidity"))
        if order in {"volume_24hr", "volume"}:
            field = "volume24hrClob" if order == "volume_24hr" else "volumeClob"
            fallback = "volume24hr" if order == "volume_24hr" else "volume"
            return coerce_float(market.get(field) or market.get(fallback))
        if order == "start_date":
            return parse_datetime(market.get("startDate") or market.get("startDateIso"))
        if order == "end_date":
            return parse_datetime(market.get("endDate") or market.get("endDateIso"))
        if order == "competitive":
            return coerce_float(market.get("competitive"))
        if order == "closed_time":
            return parse_datetime(market.get("closedTime"))
        return None
|
603
|
+
|
|
604
|
+
    def _annotate_rankings(self, markets: list[dict[str, Any]], *, order: str) -> list[dict[str, Any]]:
        """Attach `_ranking` / `_rankingContext` metadata to each market.

        Each market copy gains a `_ranking` dict describing the sort field,
        its (serialized) value, whether it resolved, and whether a fallback
        alias supplied it.  After the pass, every market also gets a shared
        `_rankingContext` reporting whether any candidate had an unresolved
        rank value (degraded ranking).
        """
        unresolved = 0
        annotated = []
        # Report the camelCase field name to callers.
        rank_field = DISPLAY_ORDER_MAP.get(order, order)
        for market in markets:
            annotated_market = dict(market)
            rank_value = self._market_order_value(annotated_market, order)
            ranking_resolved = rank_value is not None
            if not ranking_resolved:
                unresolved += 1
            annotated_market["_ranking"] = {
                "rankField": rank_field,
                "rankValue": self._serialize_rank_value(rank_value),
                "rankingResolved": ranking_resolved,
                "rankingSource": self._ranking_source(annotated_market, order, ranking_resolved),
                "rankingFallbackUsed": self._ranking_fallback_used(annotated_market, order),
            }
            annotated.append(annotated_market)

        degraded = unresolved > 0
        reason = None
        if degraded:
            reason = f"{unresolved} matching candidate(s) had null {rank_field} after fallback resolution"
        # Second pass: the context depends on the totals from the first pass.
        for market in annotated:
            market["_rankingContext"] = {
                "rankingDegraded": degraded,
                "rankingIncompleteCount": unresolved,
                "rankingDegradedReason": reason,
            }
        return annotated
|
634
|
+
|
|
635
|
+
def _ranking_source(self, market: dict[str, Any], order: str, ranking_resolved: bool) -> str:
|
|
636
|
+
if not ranking_resolved:
|
|
637
|
+
return "unresolved"
|
|
638
|
+
return "fallback" if self._ranking_fallback_used(market, order) else "market"
|
|
639
|
+
|
|
640
|
+
def _ranking_fallback_used(self, market: dict[str, Any], order: str) -> bool:
|
|
641
|
+
field_names = RANK_FIELD_ALIASES.get(order, ())
|
|
642
|
+
if not field_names:
|
|
643
|
+
return False
|
|
644
|
+
primary = field_names[0]
|
|
645
|
+
if market.get(primary) not in (None, ""):
|
|
646
|
+
return False
|
|
647
|
+
return any(market.get(field_name) not in (None, "") for field_name in field_names[1:])
|
|
648
|
+
|
|
649
|
+
def _serialize_rank_value(self, value: Any) -> Any:
|
|
650
|
+
if isinstance(value, datetime):
|
|
651
|
+
return value.isoformat().replace("+00:00", "Z")
|
|
652
|
+
return value
|