opentradex 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +8 -0
- package/CLAUDE.md +98 -0
- package/README.md +246 -0
- package/SOUL.md +79 -0
- package/SPEC.md +317 -0
- package/SUBMISSION.md +30 -0
- package/architecture.excalidraw +170 -0
- package/architecture.png +0 -0
- package/bin/opentradex.mjs +4 -0
- package/data/.gitkeep +0 -0
- package/data/strategy_notes.md +158 -0
- package/gossip/__init__.py +0 -0
- package/gossip/dashboard.py +150 -0
- package/gossip/db.py +358 -0
- package/gossip/kalshi.py +492 -0
- package/gossip/news.py +235 -0
- package/gossip/trader.py +646 -0
- package/main.py +287 -0
- package/package.json +47 -0
- package/requirements.txt +7 -0
- package/src/cli.mjs +124 -0
- package/src/index.mjs +420 -0
- package/web/AGENTS.md +5 -0
- package/web/CLAUDE.md +1 -0
- package/web/README.md +36 -0
- package/web/components.json +25 -0
- package/web/eslint.config.mjs +18 -0
- package/web/next.config.ts +7 -0
- package/web/package-lock.json +11626 -0
- package/web/package.json +37 -0
- package/web/postcss.config.mjs +7 -0
- package/web/public/file.svg +1 -0
- package/web/public/globe.svg +1 -0
- package/web/public/next.svg +1 -0
- package/web/public/vercel.svg +1 -0
- package/web/public/window.svg +1 -0
- package/web/src/app/api/agent/route.ts +77 -0
- package/web/src/app/api/agent/stream/route.ts +87 -0
- package/web/src/app/api/markets/route.ts +15 -0
- package/web/src/app/api/news/live/route.ts +77 -0
- package/web/src/app/api/news/reddit/route.ts +118 -0
- package/web/src/app/api/news/route.ts +10 -0
- package/web/src/app/api/news/tiktok/route.ts +115 -0
- package/web/src/app/api/news/truthsocial/route.ts +116 -0
- package/web/src/app/api/news/twitter/route.ts +186 -0
- package/web/src/app/api/portfolio/route.ts +50 -0
- package/web/src/app/api/prices/route.ts +18 -0
- package/web/src/app/api/trades/route.ts +10 -0
- package/web/src/app/favicon.ico +0 -0
- package/web/src/app/globals.css +170 -0
- package/web/src/app/layout.tsx +36 -0
- package/web/src/app/page.tsx +366 -0
- package/web/src/components/AgentLog.tsx +71 -0
- package/web/src/components/LiveStream.tsx +394 -0
- package/web/src/components/MarketScanner.tsx +111 -0
- package/web/src/components/NewsFeed.tsx +561 -0
- package/web/src/components/PortfolioStrip.tsx +139 -0
- package/web/src/components/PositionsPanel.tsx +219 -0
- package/web/src/components/TopBar.tsx +127 -0
- package/web/src/components/ui/badge.tsx +52 -0
- package/web/src/components/ui/button.tsx +60 -0
- package/web/src/components/ui/card.tsx +103 -0
- package/web/src/components/ui/scroll-area.tsx +55 -0
- package/web/src/components/ui/separator.tsx +25 -0
- package/web/src/components/ui/tabs.tsx +82 -0
- package/web/src/components/ui/tooltip.tsx +66 -0
- package/web/src/lib/db.ts +81 -0
- package/web/src/lib/types.ts +130 -0
- package/web/src/lib/utils.ts +6 -0
- package/web/tsconfig.json +34 -0
package/gossip/kalshi.py
ADDED
|
@@ -0,0 +1,492 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Kalshi API client — market scanning, orderbook, search, and authenticated trading.
|
|
3
|
+
|
|
4
|
+
CLI tool invoked by Claude Code agent:
|
|
5
|
+
python3 gossip/kalshi.py scan [--categories "Economics,Politics"] [--days 14]
|
|
6
|
+
python3 gossip/kalshi.py market TICKER
|
|
7
|
+
python3 gossip/kalshi.py orderbook TICKER
|
|
8
|
+
python3 gossip/kalshi.py search "bitcoin"
|
|
9
|
+
python3 gossip/kalshi.py events TICKER
|
|
10
|
+
python3 gossip/kalshi.py order TICKER --action buy --side yes --count 3 --price 55
|
|
11
|
+
python3 gossip/kalshi.py positions
|
|
12
|
+
python3 gossip/kalshi.py balance
|
|
13
|
+
|
|
14
|
+
All output is JSON to stdout. Logs go to stderr.
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
from __future__ import annotations

import argparse
import asyncio
import base64
import json
import math
import os
import sys
import time
import uuid
from dataclasses import dataclass, asdict
from datetime import datetime, timezone, timedelta
from pathlib import Path

import aiohttp
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import padding
from dotenv import load_dotenv
|
|
34
|
+
|
|
35
|
+
load_dotenv(Path(__file__).resolve().parent.parent / ".env")
|
|
36
|
+
|
|
37
|
+
# API roots. Only production is ever used — see get_base_url().
PROD_BASE = "https://api.elections.kalshi.com/trade-api/v2"
DEMO_BASE = "https://demo-api.kalshi.co/trade-api/v2"

def get_base_url() -> str:
    """Return the API root to use.

    Always the production base: per the original author's note, the demo
    API serves stale/fake data and is not useful.
    """
    return PROD_BASE
|
|
43
|
+
|
|
44
|
+
def log(msg: str) -> None:
    """Emit a diagnostic line on stderr (stdout is reserved for JSON output)."""
    sys.stderr.write(msg + "\n")
|
|
46
|
+
|
|
47
|
+
# --- Auth ---
|
|
48
|
+
|
|
49
|
+
def load_private_key():
    """Load the Kalshi RSA private key from environment configuration.

    KALSHI_PRIVATE_KEY_PATH (path to a PEM file) takes precedence over the
    inline KALSHI_PRIVATE_KEY value. Returns None when neither is set.
    """
    pem = os.getenv("KALSHI_PRIVATE_KEY", "")
    pem_path = os.getenv("KALSHI_PRIVATE_KEY_PATH", "")
    if pem_path:
        pem = Path(pem_path).read_text()
    if not pem:
        return None
    return serialization.load_pem_private_key(pem.encode(), password=None)
|
|
57
|
+
|
|
58
|
+
def build_auth_headers(method: str, path: str) -> dict:
    """Build Kalshi RSA-PSS authentication headers for one request.

    Signs the string "{timestamp_ms}{METHOD}{path}" with the configured RSA
    key using PSS/SHA-256 (32-byte salt, the SHA-256 digest size).

    Args:
        method: HTTP method; upper-cased before signing.
        path: full request path including the "/trade-api/v2" prefix.

    Returns:
        The three KALSHI-ACCESS-* headers, or {} when credentials are not
        configured (so unauthenticated public endpoints keep working).
    """
    api_key = os.getenv("KALSHI_API_KEY_ID", "")
    if not api_key:
        # Short-circuit before touching the key file: no API key id means
        # we cannot authenticate regardless of the private key.
        return {}
    pk = load_private_key()
    if pk is None:
        return {}

    timestamp = str(int(time.time() * 1000))
    message = f"{timestamp}{method.upper()}{path}"
    signature = pk.sign(
        message.encode(),
        padding.PSS(
            mgf=padding.MGF1(hashes.SHA256()),
            salt_length=32,  # SHA-256 digest size
        ),
        hashes.SHA256(),
    )
    return {
        "KALSHI-ACCESS-KEY": api_key,
        "KALSHI-ACCESS-SIGNATURE": base64.b64encode(signature).decode(),
        "KALSHI-ACCESS-TIMESTAMP": timestamp,
    }
|
|
80
|
+
|
|
81
|
+
# --- API helpers ---
|
|
82
|
+
|
|
83
|
+
# Series-ticker prefixes dropped from every scan.
SKIP_SERIES = {"KXMVESPORTS", "KXMVE"}  # only skip pure noise; agent decides what's interesting

# Shared HTTP retry policy for api_get/api_post: up to MAX_RETRIES extra
# attempts with exponential backoff starting at BASE_DELAY seconds (1s, 2s, 4s).
MAX_RETRIES = 3
BASE_DELAY = 1.0
|
|
87
|
+
|
|
88
|
+
async def api_get(session: aiohttp.ClientSession, path: str, params: dict | None = None, auth: bool = False) -> dict:
    """GET a Kalshi API path with retry/backoff on 429s and transport errors.

    Returns the parsed JSON body on success. On HTTP >= 400 or after retries
    are exhausted, returns a dict containing an "error" key instead of
    raising — callers check for "error".
    """
    base = get_base_url()
    url = f"{base}{path}"
    headers = {"Accept-Encoding": "gzip", "Content-Type": "application/json"}
    if auth:
        # The signature covers the full path including the API prefix.
        headers.update(build_auth_headers("GET", f"/trade-api/v2{path}"))

    for attempt in range(MAX_RETRIES + 1):
        try:
            async with session.get(url, params=params, headers=headers) as resp:
                text = await resp.text()
                if resp.status == 429 and attempt < MAX_RETRIES:
                    # Rate limited: exponential backoff (1s, 2s, 4s...).
                    delay = BASE_DELAY * (2 ** attempt)
                    log(f"Rate limited, retrying in {delay:.0f}s...")
                    await asyncio.sleep(delay)
                    continue
                if resp.status >= 400:
                    # Also reached by a final 429 once retries are exhausted.
                    return {"error": f"HTTP {resp.status}", "body": text[:200]}
                return json.loads(text) if text else {}
        except (aiohttp.ClientError, asyncio.TimeoutError) as e:
            if attempt < MAX_RETRIES:
                await asyncio.sleep(BASE_DELAY * (2 ** attempt))
                continue
            return {"error": str(e)}
    return {"error": "max retries exceeded"}
|
|
113
|
+
|
|
114
|
+
async def api_post(session: aiohttp.ClientSession, path: str, body: dict) -> dict:
    """POST a JSON body to a Kalshi API path (auth headers always attached).

    Same retry/backoff and error-dict contract as api_get: retries 429s and
    transport errors with exponential backoff; never raises to the caller.
    """
    url = f"{get_base_url()}{path}"
    headers = {"Content-Type": "application/json"}
    headers.update(build_auth_headers("POST", f"/trade-api/v2{path}"))

    for attempt in range(MAX_RETRIES + 1):
        try:
            async with session.post(url, json=body, headers=headers) as resp:
                text = await resp.text()
                if resp.status == 429 and attempt < MAX_RETRIES:
                    # Rate limited: back off 1s, 2s, 4s before retrying.
                    await asyncio.sleep(BASE_DELAY * (2 ** attempt))
                    continue
                if resp.status >= 400:
                    # Keeps a larger error-body slice (500 chars) than api_get.
                    return {"error": f"HTTP {resp.status}", "body": text[:500]}
                return json.loads(text) if text else {}
        except (aiohttp.ClientError, asyncio.TimeoutError) as e:
            if attempt < MAX_RETRIES:
                await asyncio.sleep(BASE_DELAY * (2 ** attempt))
                continue
            return {"error": str(e)}
    return {"error": "max retries exceeded"}
|
|
135
|
+
|
|
136
|
+
async def api_delete(session: aiohttp.ClientSession, path: str, body: dict | None = None) -> dict:
    """DELETE a Kalshi API path with auth headers; single attempt, no retries.

    Follows the api_get/api_post convention of returning an
    {"error": ..., "body": ...} dict on HTTP >= 400 instead of raising.
    """
    headers = {"Content-Type": "application/json", **build_auth_headers("DELETE", f"/trade-api/v2{path}")}
    target = f"{get_base_url()}{path}"
    async with session.delete(target, json=body, headers=headers) as resp:
        payload = await resp.text()
        if resp.status >= 400:
            return {"error": f"HTTP {resp.status}", "body": payload[:500]}
        if not payload:
            return {}
        return json.loads(payload)
|
|
146
|
+
|
|
147
|
+
# --- Market scanning ---
|
|
148
|
+
|
|
149
|
+
@dataclass
|
|
150
|
+
class Market:
|
|
151
|
+
ticker: str
|
|
152
|
+
event_ticker: str
|
|
153
|
+
series_ticker: str
|
|
154
|
+
title: str
|
|
155
|
+
category: str
|
|
156
|
+
rules: str
|
|
157
|
+
close_time: str
|
|
158
|
+
days_to_close: float
|
|
159
|
+
yes_bid: float
|
|
160
|
+
yes_ask: float
|
|
161
|
+
mid: float
|
|
162
|
+
spread_cents: float
|
|
163
|
+
volume: float
|
|
164
|
+
open_interest: float
|
|
165
|
+
implied_prob: float
|
|
166
|
+
|
|
167
|
+
def parse_market(m: dict, category: str = "") -> Market | None:
|
|
168
|
+
ticker = m.get("ticker", "")
|
|
169
|
+
for skip in SKIP_SERIES:
|
|
170
|
+
if ticker.startswith(skip):
|
|
171
|
+
return None
|
|
172
|
+
|
|
173
|
+
close_str = m.get("close_time", "")
|
|
174
|
+
if not close_str:
|
|
175
|
+
return None
|
|
176
|
+
try:
|
|
177
|
+
close_dt = datetime.fromisoformat(close_str.replace("Z", "+00:00"))
|
|
178
|
+
except ValueError:
|
|
179
|
+
return None
|
|
180
|
+
|
|
181
|
+
now = datetime.now(timezone.utc)
|
|
182
|
+
days = (close_dt - now).total_seconds() / 86400
|
|
183
|
+
if days < 0:
|
|
184
|
+
return None
|
|
185
|
+
|
|
186
|
+
bid = float(m.get("yes_bid_dollars", "0") or "0")
|
|
187
|
+
ask = float(m.get("yes_ask_dollars", "0") or "0")
|
|
188
|
+
vol = float(m.get("volume_fp", "0") or "0")
|
|
189
|
+
oi = float(m.get("open_interest_fp", "0") or "0")
|
|
190
|
+
|
|
191
|
+
if bid == 0 and ask == 0:
|
|
192
|
+
return None
|
|
193
|
+
|
|
194
|
+
mid = (bid + ask) / 2
|
|
195
|
+
spread = (ask - bid) * 100
|
|
196
|
+
|
|
197
|
+
cat = category or m.get("category", "")
|
|
198
|
+
|
|
199
|
+
return Market(
|
|
200
|
+
ticker=ticker,
|
|
201
|
+
event_ticker=m.get("event_ticker", ""),
|
|
202
|
+
series_ticker=m.get("series_ticker", ""),
|
|
203
|
+
title=m.get("title", ""),
|
|
204
|
+
category=cat,
|
|
205
|
+
rules=m.get("rules_primary", ""),
|
|
206
|
+
close_time=close_str[:16],
|
|
207
|
+
days_to_close=round(days, 2),
|
|
208
|
+
yes_bid=bid,
|
|
209
|
+
yes_ask=ask,
|
|
210
|
+
mid=round(mid, 4),
|
|
211
|
+
spread_cents=round(spread, 1),
|
|
212
|
+
volume=vol,
|
|
213
|
+
open_interest=oi,
|
|
214
|
+
implied_prob=round(mid, 4),
|
|
215
|
+
)
|
|
216
|
+
|
|
217
|
+
async def scan_markets(
    categories: set[str] | None = None,
    max_days: int = 30,
    min_oi: float = 50,
    max_series: int = 100,
) -> list[Market]:
    """Scan open markets series-by-series (the slow path).

    Fetches up to 500 series, filters them by SKIP_SERIES and the optional
    `categories` set, then pulls open markets for at most `max_series`
    series. A market is kept when it closes within `max_days` days and has
    open interest >= `min_oi`. Result is sorted by volume, descending.
    """
    async with aiohttp.ClientSession() as session:
        data = await api_get(session, "/series", {"limit": 500})
        all_series = data.get("series", [])

        interesting = []
        for s in all_series:
            ticker = s.get("ticker", "")
            if any(ticker.startswith(skip) for skip in SKIP_SERIES):
                continue
            if categories:
                cat = s.get("category", "")
                # A series with an empty category string passes the filter.
                if cat and cat not in categories:
                    continue
            interesting.append(s)

        interesting = interesting[:max_series]
        log(f"Scanning {len(interesting)} series...")

        markets: list[Market] = []
        for i, s in enumerate(interesting):
            mdata = await api_get(session, "/markets", {
                "series_ticker": s["ticker"],
                "status": "open",
                "limit": 200,
            })
            for m in mdata.get("markets", []):
                parsed = parse_market(m, s.get("category", ""))
                if parsed and parsed.days_to_close <= max_days and parsed.open_interest >= min_oi:
                    markets.append(parsed)

            if (i + 1) % 50 == 0:
                log(f"  checked {i+1}/{len(interesting)}...")
            # Throttle between per-series calls (api_get retries 429s, but
            # pacing avoids triggering them in the first place).
            await asyncio.sleep(0.3)

        markets.sort(key=lambda m: m.volume, reverse=True)
        return markets
|
|
259
|
+
|
|
260
|
+
async def get_market_detail(ticker: str) -> dict:
    """Fetch a market's metadata plus its current orderbook in one dict."""
    async with aiohttp.ClientSession() as session:
        raw = await api_get(session, f"/markets/{ticker}")
        ob_raw = await api_get(session, f"/markets/{ticker}/orderbook")
    return {
        "market": raw.get("market", raw),
        "orderbook": ob_raw.get("orderbook_fp", ob_raw.get("orderbook", {})),
    }
|
|
267
|
+
|
|
268
|
+
async def get_orderbook(ticker: str) -> dict:
    """Fetch just the orderbook for a ticker (fixed-point form when present)."""
    async with aiohttp.ClientSession() as session:
        resp = await api_get(session, f"/markets/{ticker}/orderbook")
    return resp.get("orderbook_fp", resp.get("orderbook", resp))
|
|
272
|
+
|
|
273
|
+
async def search_events(query: str) -> list[dict]:
    """Case-insensitive substring search over event titles.

    Pulls one page of events (limit 200) with nested markets and returns at
    most 20 matches, each carrying a trimmed view of its first 10 markets.
    """
    async with aiohttp.ClientSession() as session:
        data = await api_get(session, "/events", {"limit": 200, "with_nested_markets": "true"})

    needle = query.lower()
    hits = [e for e in data.get("events", []) if needle in e.get("title", "").lower()]

    def slim(m: dict) -> dict:
        # Keep only the fields the agent needs to pick a market.
        return {
            "ticker": m.get("ticker", ""),
            "title": m.get("yes_sub_title", m.get("title", "")),
            "yes_bid": m.get("yes_bid", 0),
            "yes_ask": m.get("yes_ask", 0),
            "volume": m.get("volume", 0),
        }

    return [
        {
            "event_ticker": e.get("event_ticker", ""),
            "title": e.get("title", ""),
            "category": e.get("category", ""),
            "markets": [slim(m) for m in e.get("markets", [])[:10]],
        }
        for e in hits[:20]
    ]
|
|
297
|
+
|
|
298
|
+
async def quick_scan(categories: set[str] | None = None, max_days: int = 30, min_volume: float = 100) -> list[Market]:
    """Fast scan using /events endpoint — single paginated call instead of per-series iteration.

    Walks up to 5 pages of open events (200 per page, markets nested),
    keeping markets that close within `max_days` days and have volume of at
    least `min_volume`. Returns Market objects sorted by volume, descending.
    """
    # NOTE(review): parse_market reads *_dollars / *_fp fields; confirm the
    # nested markets returned by /events actually include them.
    async with aiohttp.ClientSession() as session:
        all_markets = []
        cursor = ""
        for page in range(5):  # max 5 pages
            params = {"limit": 200, "with_nested_markets": "true", "status": "open"}
            if cursor:
                params["cursor"] = cursor
            data = await api_get(session, "/events", params)
            events = data.get("events", [])
            cursor = data.get("cursor", "")

            for e in events:
                cat = e.get("category", "")
                if categories and cat not in categories:
                    continue
                for m in e.get("markets", []):
                    parsed = parse_market(m, cat)
                    if parsed and parsed.days_to_close <= max_days and parsed.volume >= min_volume:
                        all_markets.append(parsed)

            if not cursor:
                # No next-page token — all events consumed.
                break

        all_markets.sort(key=lambda m: m.volume, reverse=True)
        log(f"Quick scan: {len(all_markets)} markets found")
        return all_markets
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
async def get_event_markets(event_ticker: str) -> list[dict]:
    """List all markets belonging to one event (raw API dicts, up to 200)."""
    params = {"event_ticker": event_ticker, "limit": 200}
    async with aiohttp.ClientSession() as session:
        payload = await api_get(session, "/markets", params)
    return payload.get("markets", [])
|
|
335
|
+
|
|
336
|
+
# --- Authenticated endpoints ---
|
|
337
|
+
|
|
338
|
+
async def place_order(ticker: str, action: str, side: str, count: int, price_cents: int | None = None, order_type: str = "market") -> dict:
|
|
339
|
+
body: dict = {
|
|
340
|
+
"ticker": ticker,
|
|
341
|
+
"action": action,
|
|
342
|
+
"side": side,
|
|
343
|
+
"type": order_type,
|
|
344
|
+
"count": count,
|
|
345
|
+
}
|
|
346
|
+
if price_cents is not None:
|
|
347
|
+
body["yes_price"] = price_cents
|
|
348
|
+
async with aiohttp.ClientSession() as session:
|
|
349
|
+
return await api_post(session, "/portfolio/orders", body)
|
|
350
|
+
|
|
351
|
+
async def get_positions() -> dict:
    """Fetch the authenticated account's open positions (up to 200 rows)."""
    async with aiohttp.ClientSession() as session:
        positions = await api_get(session, "/portfolio/positions", {"limit": 200}, auth=True)
    return positions
|
|
354
|
+
|
|
355
|
+
async def get_balance() -> dict:
    """Fetch the authenticated account's cash balance."""
    async with aiohttp.ClientSession() as session:
        balance = await api_get(session, "/portfolio/balance", auth=True)
    return balance
|
|
358
|
+
|
|
359
|
+
async def cancel_order(order_id: str) -> dict:
    """Cancel a resting order by id (authenticated DELETE)."""
    async with aiohttp.ClientSession() as session:
        resp = await api_delete(session, f"/portfolio/orders/{order_id}")
    return resp
|
|
362
|
+
|
|
363
|
+
# --- Fee calculation ---
|
|
364
|
+
|
|
365
|
+
def kalshi_fee(contracts: int, price: float) -> float:
|
|
366
|
+
return math.ceil(0.07 * contracts * price * (1 - price)) / 100
|
|
367
|
+
|
|
368
|
+
# --- CLI ---
|
|
369
|
+
|
|
370
|
+
def _build_parser() -> argparse.ArgumentParser:
    """Construct the CLI argument parser (one subcommand per API operation)."""
    parser = argparse.ArgumentParser(description="Kalshi API client")
    sub = parser.add_subparsers(dest="command")

    scan_p = sub.add_parser("scan", help="Scan active markets (slow, per-series)")
    scan_p.add_argument("--categories", type=str, default=None)
    scan_p.add_argument("--days", type=int, default=30)
    scan_p.add_argument("--min-oi", type=float, default=50)
    scan_p.add_argument("--limit", type=int, default=50)

    quick_p = sub.add_parser("quick", help="Fast scan via events endpoint (~10s)")
    quick_p.add_argument("--categories", type=str, default=None)
    quick_p.add_argument("--days", type=int, default=30)
    quick_p.add_argument("--min-volume", type=float, default=100)
    quick_p.add_argument("--limit", type=int, default=50)

    market_p = sub.add_parser("market", help="Get market details")
    market_p.add_argument("ticker")

    ob_p = sub.add_parser("orderbook", help="Get orderbook")
    ob_p.add_argument("ticker")

    search_p = sub.add_parser("search", help="Search events")
    search_p.add_argument("query")

    events_p = sub.add_parser("events", help="Get event markets")
    events_p.add_argument("event_ticker")

    order_p = sub.add_parser("order", help="Place order (authenticated)")
    order_p.add_argument("ticker")
    order_p.add_argument("--action", choices=["buy", "sell"], required=True)
    order_p.add_argument("--side", choices=["yes", "no"], required=True)
    order_p.add_argument("--count", type=int, required=True)
    order_p.add_argument("--price", type=int, default=None, help="Price in cents (1-99)")
    order_p.add_argument("--type", dest="order_type", choices=["market", "limit"], default="market")

    sub.add_parser("positions", help="Get positions (authenticated)")
    sub.add_parser("balance", help="Get balance (authenticated)")

    cancel_p = sub.add_parser("cancel", help="Cancel order")
    cancel_p.add_argument("order_id")

    return parser

def _persist_snapshots(results: list[dict]) -> None:
    """Best-effort write of scan results to the local DB; never raises.

    Previously duplicated verbatim in the scan and quick branches of main().
    """
    if not results:
        return
    try:
        # Make the package root importable when run as a bare script.
        sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
        from gossip.db import GossipDB
        db = GossipDB()
        db.insert_market_snapshots(results)
    except Exception as e:
        # Snapshots are an optional side channel — a DB failure must not
        # break the JSON the agent consumes on stdout.
        log(f"DB snapshot write failed: {e}")

async def main():
    """CLI entry point: parse args, run the command, print JSON on stdout."""
    parser = _build_parser()
    args = parser.parse_args()

    if args.command == "scan":
        cats = set(args.categories.split(",")) if args.categories else None
        markets = await scan_markets(categories=cats, max_days=args.days, min_oi=args.min_oi)
        results = [asdict(m) for m in markets[:args.limit]]
        _persist_snapshots(results)
        print(json.dumps(results, indent=2))

    elif args.command == "quick":
        cats = set(args.categories.split(",")) if args.categories else None
        markets = await quick_scan(categories=cats, max_days=args.days, min_volume=args.min_volume)
        results = [asdict(m) for m in markets[:args.limit]]
        _persist_snapshots(results)
        print(json.dumps(results, indent=2))

    elif args.command == "market":
        print(json.dumps(await get_market_detail(args.ticker), indent=2))

    elif args.command == "orderbook":
        print(json.dumps(await get_orderbook(args.ticker), indent=2))

    elif args.command == "search":
        print(json.dumps(await search_events(args.query), indent=2))

    elif args.command == "events":
        print(json.dumps(await get_event_markets(args.event_ticker), indent=2))

    elif args.command == "order":
        result = await place_order(
            ticker=args.ticker,
            action=args.action,
            side=args.side,
            count=args.count,
            price_cents=args.price,
            order_type=args.order_type,
        )
        print(json.dumps(result, indent=2))

    elif args.command == "positions":
        print(json.dumps(await get_positions(), indent=2))

    elif args.command == "balance":
        print(json.dumps(await get_balance(), indent=2))

    elif args.command == "cancel":
        print(json.dumps(await cancel_order(args.order_id), indent=2))

    else:
        parser.print_help()

if __name__ == "__main__":
    asyncio.run(main())
|