probable-trader 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +118 -0
- package/bin/probable-trader.js +64 -0
- package/lib/setup.js +125 -0
- package/package.json +30 -0
- package/requirements.txt +6 -0
- package/schemas/action-output.schema.json +50 -0
- package/schemas/config.schema.json +49 -0
- package/schemas/order-intent.schema.json +47 -0
- package/scripts/lib/__init__.py +1 -0
- package/scripts/lib/client_wrapper.py +241 -0
- package/scripts/lib/config.py +150 -0
- package/scripts/lib/db.py +176 -0
- package/scripts/lib/onboard.py +177 -0
- package/scripts/lib/report.py +105 -0
- package/scripts/lib/safety.py +76 -0
- package/scripts/lib/ws_client.py +109 -0
- package/scripts/prob.py +474 -0
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
"""ProbableClient: wraps opinion_clob_sdk.Client with normalization, audit logging, and safety."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import traceback
|
|
5
|
+
from dataclasses import asdict
|
|
6
|
+
from typing import Any, Optional
|
|
7
|
+
|
|
8
|
+
from .config import ProbableConfig, create_client, load_config
|
|
9
|
+
from .db import ProbableDB
|
|
10
|
+
from .safety import ActionResult
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def _normalize(obj: Any) -> Any:
|
|
14
|
+
"""Convert SDK response objects to plain dicts/lists."""
|
|
15
|
+
if obj is None:
|
|
16
|
+
return None
|
|
17
|
+
if isinstance(obj, (str, int, float, bool)):
|
|
18
|
+
return obj
|
|
19
|
+
if isinstance(obj, (bytes, bytearray)):
|
|
20
|
+
return "0x" + obj.hex()
|
|
21
|
+
if isinstance(obj, dict):
|
|
22
|
+
return {k: _normalize(v) for k, v in obj.items()}
|
|
23
|
+
if isinstance(obj, (list, tuple)):
|
|
24
|
+
return [_normalize(v) for v in obj]
|
|
25
|
+
# Handle dataclasses
|
|
26
|
+
if hasattr(obj, "__dataclass_fields__"):
|
|
27
|
+
return {k: _normalize(v) for k, v in asdict(obj).items()}
|
|
28
|
+
# Handle OpenAPI generated models (have to_dict or attribute_map)
|
|
29
|
+
if hasattr(obj, "to_dict"):
|
|
30
|
+
return _normalize(obj.to_dict())
|
|
31
|
+
if hasattr(obj, "__dict__"):
|
|
32
|
+
return {k: _normalize(v) for k, v in obj.__dict__.items() if not k.startswith("_")}
|
|
33
|
+
return str(obj)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class ProbableClient:
    """High-level wrapper around opinion_clob_sdk.Client.

    Adds three things on top of the raw SDK:
      * lazy client construction from ProbableConfig,
      * normalization of SDK responses to plain dicts/lists (via _normalize),
      * optional audit logging / market caching through an attached ProbableDB.

    Every public method returns an ActionResult; SDK exceptions are caught in
    _call() and surfaced as ActionResult(success=False, error=...).
    """

    def __init__(self, cfg: Optional[ProbableConfig] = None, db: Optional[ProbableDB] = None):
        # cfg falls back to env/.probable config; db is optional -- when None,
        # audit logging and market caching are silently skipped.
        self.cfg = cfg or load_config()
        self.db = db
        self._sdk = None  # created lazily by the `sdk` property

    @property
    def sdk(self):
        """The underlying opinion_clob_sdk.Client, constructed on first use."""
        if self._sdk is None:
            self._sdk = create_client(self.cfg)
        return self._sdk

    def _log(self, action: str, request: Any, response: Any, success: bool):
        """Mirror one call into the audit log when a DB is attached."""
        if self.db:
            self.db.log_action(action, request, response, success)

    def _call(self, action: str, request_data: Any, fn, *args, **kwargs) -> ActionResult:
        """Execute an SDK call with error handling and audit logging."""
        try:
            raw = fn(*args, **kwargs)
            data = _normalize(raw)
            self._log(action, request_data, data, True)
            return ActionResult(success=True, action=action, data=data)
        except Exception as e:
            # Deliberately broad: any SDK/network failure becomes a failed
            # ActionResult instead of propagating to the caller.
            err = f"{type(e).__name__}: {e}"
            self._log(action, request_data, {"error": err}, False)
            return ActionResult(success=False, action=action, error=err)

    # --- Health / Info ---

    def doctor(self) -> ActionResult:
        """Health check: verify API connectivity and config."""
        checks = {}
        # Config check
        missing = self.cfg.validate()
        checks["config"] = {"ok": len(missing) == 0, "missing": missing}
        checks["host"] = self.cfg.host
        checks["chain_id"] = self.cfg.chain_id
        checks["authenticated"] = self.cfg.is_authenticated()

        # API connectivity
        try:
            qt = self.sdk.get_quote_tokens(use_cache=False)
            checks["api_reachable"] = True
            checks["quote_tokens"] = _normalize(qt)
        except Exception as e:
            checks["api_reachable"] = False
            checks["api_error"] = str(e)

        # Overall success mirrors reachability only; missing credentials are
        # reported in the payload but do not fail the check.
        ok = checks.get("api_reachable", False)
        self._log("doctor", None, checks, ok)
        return ActionResult(success=ok, action="doctor", data=checks)

    # --- Market Data (Read-Only) ---

    def get_markets(self, topic_type=None, page: int = 1, limit: int = 20,
                    status=None, sort_by=None) -> ActionResult:
        """List markets, mapping the string filters onto the SDK's enums."""
        from opinion_clob_sdk.sdk import TopicType, TopicStatusFilter, TopicSortType

        # topic_type: "binary" / "categorical"; anything else means ALL.
        tt = TopicType.ALL
        if topic_type == "binary":
            tt = TopicType.BINARY
        elif topic_type == "categorical":
            tt = TopicType.CATEGORICAL

        # status: "activated" / "resolved"; anything else means no filter.
        sf = None
        if status == "activated":
            sf = TopicStatusFilter.ACTIVATED
        elif status == "resolved":
            sf = TopicStatusFilter.RESOLVED

        sb = None
        if sort_by:
            sort_map = {
                "time": TopicSortType.BY_TIME_DESC,
                "volume": TopicSortType.BY_VOLUME_DESC,
                "volume_24h": TopicSortType.BY_VOLUME_24H_DESC,
                "volume_7d": TopicSortType.BY_VOLUME_7D_DESC,
                "cutoff": TopicSortType.BY_CUTOFF_TIME_ASC,
            }
            sb = sort_map.get(sort_by)  # unknown sort keys fall through to None

        req = {"topic_type": topic_type, "page": page, "limit": limit, "status": status, "sort_by": sort_by}
        # NOTE(review): positional order assumed to match the SDK's
        # get_markets(topic_type, page, limit, status, sort) signature -- confirm.
        return self._call("get_markets", req, self.sdk.get_markets, tt, page, limit, sf, sb)

    def get_market(self, market_id: int) -> ActionResult:
        """Fetch one market, serving from the local cache when it is fresh."""
        # Check cache first
        if self.db:
            cached = self.db.get_cached_market(market_id)
            if cached:
                self._log("get_market", {"market_id": market_id}, cached, True)
                return ActionResult(success=True, action="get_market", data=cached)

        result = self._call("get_market", {"market_id": market_id}, self.sdk.get_market, market_id)
        if result.success and result.data and self.db:
            self.db.cache_market(market_id, result.data)
        return result

    def get_orderbook(self, token_id: str) -> ActionResult:
        """Order book for one token (delegates to sdk.get_orderbook)."""
        return self._call("get_orderbook", {"token_id": token_id}, self.sdk.get_orderbook, token_id)

    def get_latest_price(self, token_id: str) -> ActionResult:
        """Latest price for one token (delegates to sdk.get_latest_price)."""
        return self._call("get_latest_price", {"token_id": token_id}, self.sdk.get_latest_price, token_id)

    def get_price_history(self, token_id: str, interval: str = "1h",
                          start_at=None, end_at=None) -> ActionResult:
        """Price history; start/end bounds are only forwarded when truthy."""
        req = {"token_id": token_id, "interval": interval, "start_at": start_at, "end_at": end_at}
        kwargs = {"token_id": token_id, "interval": interval}
        if start_at:
            kwargs["start_at"] = start_at
        if end_at:
            kwargs["end_at"] = end_at
        return self._call("get_price_history", req, self.sdk.get_price_history, **kwargs)

    def get_fee_rates(self, token_id: str) -> ActionResult:
        """Fee rates for one token (delegates to sdk.get_fee_rates)."""
        return self._call("get_fee_rates", {"token_id": token_id}, self.sdk.get_fee_rates, token_id)

    def get_quote_tokens(self) -> ActionResult:
        """Supported quote tokens (delegates to sdk.get_quote_tokens)."""
        return self._call("get_quote_tokens", {}, self.sdk.get_quote_tokens)

    # --- Authenticated: Trading ---

    def enable_trading(self) -> ActionResult:
        """Enable trading for the configured account (sdk.enable_trading)."""
        return self._call("enable_trading", {}, self.sdk.enable_trading)

    def get_user_auth(self) -> ActionResult:
        """Authentication info for the configured account (sdk.get_user_auth)."""
        return self._call("get_user_auth", {}, self.sdk.get_user_auth)

    def place_order(self, market_id: int, token_id: str, side: str, price: str,
                    amount: str, order_type: str = "limit") -> ActionResult:
        """Place an order and record it in the local orders log.

        side: "BUY"/"SELL", case-insensitive; anything other than BUY is SELL.
        amount: set on makerAmountInQuoteToken for BUYs and
        makerAmountInBaseToken for SELLs (see the assignment below).
        """
        from opinion_clob_sdk.chain.py_order_utils.model.order import PlaceOrderDataInput

        side_int = 0 if side.upper() == "BUY" else 1
        # NOTE(review): SDK order-type values assumed to be 2 == limit,
        # 1 == market -- confirm against opinion_clob_sdk.
        otype_int = 2 if order_type.lower() == "limit" else 1

        data = PlaceOrderDataInput()
        data.marketId = market_id
        data.tokenId = token_id
        data.side = side_int
        data.price = price
        data.orderType = otype_int

        # Set amount based on side
        if side_int == 0:  # BUY
            data.makerAmountInQuoteToken = amount
        else:  # SELL
            data.makerAmountInBaseToken = amount

        req = {"market_id": market_id, "token_id": token_id, "side": side, "price": price,
               "amount": amount, "order_type": order_type}
        result = self._call("place_order", req, self.sdk.place_order, data)

        if result.success and result.data and self.db:
            # Response id field name varies; try "id" first, then "order_id".
            oid = result.data.get("id", result.data.get("order_id", ""))
            if oid:
                self.db.log_order(str(oid), market_id, token_id, side, price, amount, "pending", result.data)
        return result

    def cancel_order(self, order_id: str) -> ActionResult:
        """Cancel one order by id and record the cancellation locally."""
        result = self._call("cancel_order", {"order_id": order_id}, self.sdk.cancel_order, order_id)
        if result.success and self.db:
            # NOTE(review): log_order upserts by order_id, so these empty
            # market/side/price fields overwrite the richer row written at
            # placement time -- confirm this is intended.
            self.db.log_order(order_id, 0, "", "", "", "", "cancelled", result.data or {})
        return result

    def cancel_all_orders(self, market_id: Optional[int] = None) -> ActionResult:
        """Cancel all open orders; a market_id of 0 or None means no filter."""
        req = {"market_id": market_id}
        kwargs = {}
        if market_id:
            kwargs["market_id"] = market_id
        return self._call("cancel_all_orders", req, self.sdk.cancel_all_orders, **kwargs)

    def get_my_orders(self, market_id: int = 0, status: str = "", limit: int = 10,
                      page: int = 1) -> ActionResult:
        """List the account's orders (sdk.get_my_orders, positional args)."""
        req = {"market_id": market_id, "status": status, "limit": limit, "page": page}
        return self._call("get_my_orders", req, self.sdk.get_my_orders, market_id, status, limit, page)

    def get_order_by_id(self, order_id: str) -> ActionResult:
        """Fetch a single order by id (sdk.get_order_by_id)."""
        return self._call("get_order_by_id", {"order_id": order_id}, self.sdk.get_order_by_id, order_id)

    def get_my_positions(self, market_id: int = 0, page: int = 1, limit: int = 10) -> ActionResult:
        """List the account's positions (sdk.get_my_positions)."""
        req = {"market_id": market_id, "page": page, "limit": limit}
        return self._call("get_my_positions", req, self.sdk.get_my_positions, market_id, page, limit)

    def get_my_trades(self, market_id: Optional[int] = None, page: int = 1,
                      limit: int = 10) -> ActionResult:
        """List the account's trades (sdk.get_my_trades)."""
        req = {"market_id": market_id, "page": page, "limit": limit}
        return self._call("get_my_trades", req, self.sdk.get_my_trades, market_id, page, limit)

    def get_my_balances(self) -> ActionResult:
        """Account balances (sdk.get_my_balances)."""
        return self._call("get_my_balances", {}, self.sdk.get_my_balances)

    # --- On-chain: Split / Merge / Redeem ---

    def split(self, market_id: int, amount: int) -> ActionResult:
        """On-chain split for *market_id* (delegates to sdk.split)."""
        req = {"market_id": market_id, "amount": amount}
        return self._call("split", req, self.sdk.split, market_id, amount)

    def merge(self, market_id: int, amount: int) -> ActionResult:
        """On-chain merge for *market_id* (delegates to sdk.merge)."""
        req = {"market_id": market_id, "amount": amount}
        return self._call("merge", req, self.sdk.merge, market_id, amount)

    def redeem(self, market_id: int) -> ActionResult:
        """On-chain redeem for *market_id* (delegates to sdk.redeem)."""
        req = {"market_id": market_id}
        return self._call("redeem", req, self.sdk.redeem, market_id)
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"""Configuration loading for Probable Markets Skill.
|
|
2
|
+
|
|
3
|
+
Loads from environment variables and optional .probable/config.json.
|
|
4
|
+
Provides create_client() factory for opinion_clob_sdk.Client.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
from dataclasses import dataclass, field
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Optional
|
|
12
|
+
|
|
13
|
+
# Fallback values used when neither environment variables nor the
# .probable/config.json override file provide a setting.
DEFAULTS = {
    "host": "https://api.probable.markets",
    "chain_id": 56,  # NOTE(review): presumably BNB Smart Chain mainnet -- confirm
    "rpc_url": "",
    "api_key": "",
    "private_key": "",
    "multi_sig_addr": "",
}

# Local state directory (relative to the working directory) and its files.
CONFIG_DIR = ".probable"
CONFIG_FILE = "config.json"  # user-editable overrides, read by load_config()
DB_FILE = "probable.db"      # SQLite audit/cache database
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@dataclass
class ProbableConfig:
    """Runtime settings for the Probable Markets client.

    Defaults mirror the module-level DEFAULTS table; credentials default to
    empty strings so a read-only configuration is always constructible.
    """

    host: str = DEFAULTS["host"]
    api_key: str = ""
    api_secret: str = ""
    api_passphrase: str = ""
    chain_id: int = DEFAULTS["chain_id"]
    rpc_url: str = ""
    private_key: str = ""
    multi_sig_addr: str = ""

    @property
    def config_dir(self) -> Path:
        """Directory holding local state (config file and database)."""
        return Path(CONFIG_DIR)

    @property
    def db_path(self) -> Path:
        """Location of the SQLite audit/cache database."""
        return self.config_dir / DB_FILE

    def is_authenticated(self) -> bool:
        """True when api_key, private_key, and multi_sig_addr are all set."""
        return all((self.api_key, self.private_key, self.multi_sig_addr))

    def validate(self) -> list[str]:
        """Return list of missing required fields for trading."""
        required = (
            ("PROB_API_KEY", self.api_key),
            ("PROB_RPC_URL", self.rpc_url),
            ("PROB_PRIVATE_KEY", self.private_key),
            ("PROB_MULTI_SIG_ADDR", self.multi_sig_addr),
        )
        return [env_name for env_name, value in required if not value]

    def to_dict(self, mask_secrets: bool = True) -> dict:
        """Serializable snapshot; secrets are redacted unless mask_secrets=False."""
        def shield(secret: str) -> str:
            return _mask(secret) if mask_secrets else secret

        return {
            "host": self.host,
            "chain_id": self.chain_id,
            "rpc_url": self.rpc_url,
            "api_key": shield(self.api_key),
            "api_secret": shield(self.api_secret),
            "api_passphrase": shield(self.api_passphrase),
            "private_key": shield(self.private_key),
            "multi_sig_addr": self.multi_sig_addr,
        }
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def _mask(val: str) -> str:
|
|
77
|
+
if not val:
|
|
78
|
+
return ""
|
|
79
|
+
if len(val) <= 8:
|
|
80
|
+
return "***"
|
|
81
|
+
return val[:4] + "..." + val[-4:]
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def load_config() -> ProbableConfig:
    """Build a ProbableConfig from PROB_* environment variables, then overlay
    any truthy values found in .probable/config.json when that file parses."""
    read = os.environ.get
    cfg = ProbableConfig(
        host=read("PROB_HOST", DEFAULTS["host"]),
        api_key=read("PROB_API_KEY", ""),
        api_secret=read("PROB_API_SECRET", ""),
        api_passphrase=read("PROB_API_PASSPHRASE", ""),
        chain_id=int(read("PROB_CHAIN_ID", str(DEFAULTS["chain_id"]))),
        rpc_url=read("PROB_RPC_URL", ""),
        private_key=read("PROB_PRIVATE_KEY", ""),
        multi_sig_addr=read("PROB_MULTI_SIG_ADDR", ""),
    )

    override_path = Path(CONFIG_DIR) / CONFIG_FILE
    if override_path.exists():
        overridable = (
            "host", "api_key", "api_secret", "api_passphrase",
            "chain_id", "rpc_url", "private_key", "multi_sig_addr",
        )
        try:
            with open(override_path) as fh:
                overrides = json.load(fh)
            for name in overridable:
                # Only truthy file values win over the env-derived config.
                if name in overrides and overrides[name]:
                    setattr(cfg, name, overrides[name])
        except (json.JSONDecodeError, OSError):
            # A malformed or unreadable override file is ignored on purpose:
            # env-var configuration alone is still valid.
            pass

    return cfg
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def init_config_dir() -> Path:
    """Ensure .probable/ exists; seed a template config.json on first run.

    An existing config.json is never overwritten. Returns the directory path.
    """
    config_dir = Path(CONFIG_DIR)
    config_dir.mkdir(exist_ok=True)
    config_path = config_dir / CONFIG_FILE
    if not config_path.exists():
        template = {
            "host": DEFAULTS["host"],
            "chain_id": DEFAULTS["chain_id"],
            "rpc_url": "",
            "api_key": "",
            "private_key": "",
            "multi_sig_addr": "",
        }
        config_path.write_text(json.dumps(template, indent=2))
    return config_dir
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def create_client(cfg: Optional[ProbableConfig] = None):
    """Factory: instantiate opinion_clob_sdk.Client from *cfg*.

    Loads the default config when *cfg* is None. Only settings that are
    actually present are forwarded, so a read-only client can be built from
    host/chain_id alone.

    Fix: the original also did `from hexbytes import HexBytes` here without
    ever using the name -- a pointless third-party import that could make
    every client construction fail if hexbytes is absent. Removed.
    """
    # Deferred import keeps the SDK dependency out of module import time.
    from opinion_clob_sdk import Client

    if cfg is None:
        cfg = load_config()

    kwargs = {
        "host": cfg.host,
        "chain_id": cfg.chain_id,
    }
    if cfg.api_key:
        kwargs["apikey"] = cfg.api_key
    if cfg.rpc_url:
        kwargs["rpc_url"] = cfg.rpc_url
    if cfg.private_key:
        kwargs["private_key"] = cfg.private_key
    if cfg.multi_sig_addr:
        kwargs["multi_sig_addr"] = cfg.multi_sig_addr

    return Client(**kwargs)
|
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
"""SQLite audit log and local cache for Probable Markets Skill."""
|
|
2
|
+
|
|
3
|
+
import hashlib
import json
import sqlite3
import time
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Optional
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def _json_hash(obj: Any) -> str:
|
|
14
|
+
"""Deterministic JSON hash for audit logging."""
|
|
15
|
+
raw = json.dumps(obj, sort_keys=True, default=str)
|
|
16
|
+
return hashlib.sha256(raw.encode()).hexdigest()[:16]
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class ProbableDB:
    """SQLite-backed audit log, market cache, and order/trade history.

    All timestamps are stored as timezone-aware UTC ISO-8601 strings. Every
    time-window query computes its cutoff in Python in that same format, so
    the string comparison in SQL is consistent.

    Fixes vs. the original:
      * get_recent_trades / get_recent_orders / get_audit_log compared stored
        ISO strings ('T' separator, tz offset) against SQLite datetime()
        output (space separator); since 'T' > ' ' lexicographically, rows up
        to a full day older than the window slipped through. Cutoffs are now
        computed with timedelta in Python.
      * log_action hashed payloads only when truthy, so a present-but-empty
        {} request/response was logged with a NULL hash; now only None skips
        hashing.
    """

    def __init__(self, db_path: Path):
        # Create parent directories on demand so a fresh checkout works.
        self.db_path = db_path
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self._init_tables()

    @contextmanager
    def _conn(self):
        """Yield a connection; commit on success, always close.

        If the body raises, commit is skipped and the close discards the
        uncommitted transaction.
        """
        conn = sqlite3.connect(str(self.db_path))
        conn.row_factory = sqlite3.Row  # dict-style column access
        try:
            yield conn
            conn.commit()
        finally:
            conn.close()

    @staticmethod
    def _now_iso() -> str:
        """Current UTC time in the storage format (ISO-8601, tz-aware)."""
        return datetime.now(timezone.utc).isoformat()

    @staticmethod
    def _window_start(hours: int) -> str:
        """UTC timestamp *hours* in the past, in the storage format."""
        return (datetime.now(timezone.utc) - timedelta(hours=hours)).isoformat()

    def _init_tables(self):
        """Create tables and indexes if they do not already exist."""
        with self._conn() as conn:
            conn.executescript("""
                CREATE TABLE IF NOT EXISTS audit_log (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    timestamp TEXT NOT NULL,
                    action TEXT NOT NULL,
                    request_hash TEXT,
                    response_hash TEXT,
                    success INTEGER NOT NULL,
                    details TEXT
                );

                CREATE TABLE IF NOT EXISTS markets_cache (
                    market_id INTEGER PRIMARY KEY,
                    data TEXT NOT NULL,
                    updated_at TEXT NOT NULL
                );

                CREATE TABLE IF NOT EXISTS orders_log (
                    order_id TEXT PRIMARY KEY,
                    market_id INTEGER,
                    token_id TEXT,
                    side TEXT,
                    price TEXT,
                    amount TEXT,
                    status TEXT,
                    created_at TEXT,
                    updated_at TEXT,
                    data TEXT
                );

                CREATE TABLE IF NOT EXISTS trades_log (
                    trade_id TEXT PRIMARY KEY,
                    order_id TEXT,
                    market_id INTEGER,
                    side TEXT,
                    price TEXT,
                    amount TEXT,
                    timestamp TEXT,
                    data TEXT
                );

                CREATE INDEX IF NOT EXISTS idx_audit_timestamp ON audit_log(timestamp);
                CREATE INDEX IF NOT EXISTS idx_audit_action ON audit_log(action);
                CREATE INDEX IF NOT EXISTS idx_orders_market ON orders_log(market_id);
                CREATE INDEX IF NOT EXISTS idx_trades_market ON trades_log(market_id);
            """)

    def log_action(self, action: str, request: Any, response: Any, success: bool, details: str = ""):
        """Write an audit log row; payloads are stored as hashes, not bodies."""
        now = self._now_iso()
        # Only a truly absent payload skips hashing; {} / [] / "" still hash.
        req_hash = _json_hash(request) if request is not None else None
        resp_hash = _json_hash(response) if response is not None else None
        with self._conn() as conn:
            conn.execute(
                "INSERT INTO audit_log (timestamp, action, request_hash, response_hash, success, details) "
                "VALUES (?, ?, ?, ?, ?, ?)",
                (now, action, req_hash, resp_hash, int(success), details),
            )

    def cache_market(self, market_id: int, data: dict):
        """Upsert one market's data into the cache with a fresh timestamp."""
        now = self._now_iso()
        with self._conn() as conn:
            conn.execute(
                "INSERT OR REPLACE INTO markets_cache (market_id, data, updated_at) VALUES (?, ?, ?)",
                (market_id, json.dumps(data, default=str), now),
            )

    def get_cached_market(self, market_id: int, max_age_seconds: int = 300) -> Optional[dict]:
        """Return cached market data, or None when absent or older than
        *max_age_seconds*."""
        with self._conn() as conn:
            row = conn.execute(
                "SELECT data, updated_at FROM markets_cache WHERE market_id = ?",
                (market_id,),
            ).fetchone()
        if not row:
            return None
        updated = datetime.fromisoformat(row["updated_at"])
        age = (datetime.now(timezone.utc) - updated).total_seconds()
        if age > max_age_seconds:
            return None
        return json.loads(row["data"])

    def log_order(self, order_id: str, market_id: int, token_id: str, side: str,
                  price: str, amount: str, status: str, data: dict):
        """Upsert one order record, preserving the original created_at.

        NOTE(review): all non-key columns are overwritten on upsert; a caller
        that passes empty fields (e.g. on cancel) clobbers the richer row
        written at placement time -- confirm intended.
        """
        now = self._now_iso()
        with self._conn() as conn:
            conn.execute(
                "INSERT OR REPLACE INTO orders_log "
                "(order_id, market_id, token_id, side, price, amount, status, created_at, updated_at, data) "
                "VALUES (?, ?, ?, ?, ?, ?, ?, COALESCE((SELECT created_at FROM orders_log WHERE order_id = ?), ?), ?, ?)",
                (order_id, market_id, token_id, side, price, amount, status, order_id, now, now,
                 json.dumps(data, default=str)),
            )

    def log_trade(self, trade_id: str, order_id: str, market_id: int, side: str,
                  price: str, amount: str, timestamp: str, data: dict):
        """Insert one trade record; duplicates (same trade_id) are ignored."""
        with self._conn() as conn:
            conn.execute(
                "INSERT OR IGNORE INTO trades_log "
                "(trade_id, order_id, market_id, side, price, amount, timestamp, data) "
                "VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
                (trade_id, order_id, market_id, side, price, amount, timestamp,
                 json.dumps(data, default=str)),
            )

    def get_recent_trades(self, hours: int = 24) -> list[dict]:
        """Trades from the past *hours*, newest first."""
        # Cutoff computed in Python in the same ISO format the rows were
        # written with, so the lexicographic comparison is correct.
        cutoff = self._window_start(hours)
        with self._conn() as conn:
            rows = conn.execute(
                "SELECT * FROM trades_log WHERE timestamp >= ? ORDER BY timestamp DESC",
                (cutoff,),
            ).fetchall()
        return [dict(r) for r in rows]

    def get_recent_orders(self, hours: int = 24) -> list[dict]:
        """Orders updated in the past *hours*, newest first."""
        cutoff = self._window_start(hours)
        with self._conn() as conn:
            rows = conn.execute(
                "SELECT * FROM orders_log WHERE updated_at >= ? ORDER BY updated_at DESC",
                (cutoff,),
            ).fetchall()
        return [dict(r) for r in rows]

    def get_audit_log(self, hours: int = 24, action: Optional[str] = None) -> list[dict]:
        """Audit entries from the past *hours*, optionally filtered by action."""
        cutoff = self._window_start(hours)
        query = "SELECT * FROM audit_log WHERE timestamp >= ?"
        params: list = [cutoff]
        if action:
            query += " AND action = ?"
            params.append(action)
        query += " ORDER BY timestamp DESC"
        with self._conn() as conn:
            rows = conn.execute(query, params).fetchall()
        return [dict(r) for r in rows]