decibel-python-sdk 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- decibel/__init__.py +247 -0
- decibel/_base.py +726 -0
- decibel/_constants.py +164 -0
- decibel/_fee_pay.py +301 -0
- decibel/_gas_price_manager.py +262 -0
- decibel/_order_status.py +138 -0
- decibel/_order_types.py +109 -0
- decibel/_pagination.py +82 -0
- decibel/_subaccount_types.py +43 -0
- decibel/_transaction_builder.py +325 -0
- decibel/_utils.py +432 -0
- decibel/_version.py +1 -0
- decibel/abi/__init__.py +23 -0
- decibel/abi/__main__.py +4 -0
- decibel/abi/_registry.py +89 -0
- decibel/abi/_types.py +55 -0
- decibel/abi/generate.py +183 -0
- decibel/abi/json/netna.json +2417 -0
- decibel/abi/json/testnet.json +2919 -0
- decibel/admin.py +868 -0
- decibel/py.typed +0 -0
- decibel/read/__init__.py +279 -0
- decibel/read/_account_overview.py +119 -0
- decibel/read/_base.py +137 -0
- decibel/read/_candlesticks.py +97 -0
- decibel/read/_delegations.py +32 -0
- decibel/read/_leaderboard.py +64 -0
- decibel/read/_market_contexts.py +35 -0
- decibel/read/_market_depth.py +81 -0
- decibel/read/_market_prices.py +100 -0
- decibel/read/_market_trades.py +81 -0
- decibel/read/_markets.py +146 -0
- decibel/read/_portfolio_chart.py +48 -0
- decibel/read/_trading_points.py +36 -0
- decibel/read/_types.py +136 -0
- decibel/read/_user_active_twaps.py +70 -0
- decibel/read/_user_bulk_orders.py +73 -0
- decibel/read/_user_fund_history.py +49 -0
- decibel/read/_user_funding_history.py +45 -0
- decibel/read/_user_notifications.py +87 -0
- decibel/read/_user_open_orders.py +91 -0
- decibel/read/_user_order_history.py +101 -0
- decibel/read/_user_positions.py +84 -0
- decibel/read/_user_subaccounts.py +35 -0
- decibel/read/_user_trade_history.py +77 -0
- decibel/read/_user_twap_history.py +32 -0
- decibel/read/_vaults.py +218 -0
- decibel/read/_ws.py +245 -0
- decibel/write/__init__.py +1949 -0
- decibel/write/_types.py +190 -0
- decibel_python_sdk-0.1.0.dist-info/METADATA +255 -0
- decibel_python_sdk-0.1.0.dist-info/RECORD +53 -0
- decibel_python_sdk-0.1.0.dist-info/WHEEL +4 -0
decibel/_utils.py
ADDED
|
@@ -0,0 +1,432 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
import math
|
|
6
|
+
import secrets
|
|
7
|
+
from typing import TYPE_CHECKING, Any, TypeVar, cast
|
|
8
|
+
|
|
9
|
+
import httpx
|
|
10
|
+
from aptos_sdk.account_address import AccountAddress
|
|
11
|
+
from aptos_sdk.bcs import Serializer
|
|
12
|
+
from pydantic import BaseModel, ValidationError
|
|
13
|
+
|
|
14
|
+
if TYPE_CHECKING:
|
|
15
|
+
from ._constants import CompatVersion
|
|
16
|
+
|
|
17
|
+
logger = logging.getLogger(__name__)
|
|
18
|
+
|
|
19
|
+
# Public API of this module; underscore-prefixed helpers below are internal.
__all__ = [
    "FetchError",
    "bigint_reviver",
    "prettify_validation_error",
    "get_request",
    "get_request_sync",
    "post_request",
    "post_request_sync",
    "patch_request",
    "patch_request_sync",
    "get_market_addr",
    "get_primary_subaccount_addr",
    "get_trading_competition_subaccount_addr",
    "get_vault_share_address",
    "round_to_tick_size",
    "round_to_valid_price",
    "round_to_valid_order_size",
    "amount_to_chain_units",
    "chain_units_to_amount",
    "extract_vault_address_from_create_tx",
    "generate_random_replay_protection_nonce",
]

# Response-model type parameter: the pydantic model each HTTP helper validates into.
T = TypeVar("T", bound=BaseModel)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class FetchError(Exception):
    """Raised for non-success HTTP responses.

    Tries to extract a structured ``status``/``message`` pair from a JSON
    error body; when the body is not JSON (or not a dict with string fields),
    falls back to the raw response text and the HTTP reason phrase.
    """

    status: int
    status_text: str
    response_message: str

    def __init__(self, response_data: str, status: int, status_text: str) -> None:
        self.status = status
        # Start from the fallbacks; a parseable JSON body may override them below.
        self.status_text = status_text
        self.response_message = response_data

        try:
            payload: Any = json.loads(response_data)
        except (json.JSONDecodeError, TypeError):
            payload = None

        if isinstance(payload, dict):
            body = cast("dict[str, Any]", payload)
            status_field = body.get("status")
            message_field = body.get("message")
            if isinstance(status_field, str):
                self.status_text = status_field
            if isinstance(message_field, str):
                self.response_message = message_field

        suffix = f" ({self.status_text})" if self.status_text else ""
        super().__init__(f"HTTP Error {self.status}{suffix}: {self.response_message}")
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def bigint_reviver(obj: dict[str, Any]) -> Any:
    """JSON ``object_hook`` that decodes ``{"$bigint": "<digits>"}`` markers to int.

    Any other dict is returned unchanged.
    """
    marker = obj.get("$bigint")
    if isinstance(marker, str):
        return int(marker)
    return obj
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def prettify_validation_error(e: ValidationError) -> str:
    """Render a pydantic ValidationError as a compact multi-line string."""

    def describe(err: Any) -> str:
        # An empty location tuple means the error applies to the whole input.
        location = " -> ".join(str(part) for part in err["loc"]) if err["loc"] else "root"
        return f" {location}: {err['msg']}"

    return "Validation error:\n" + "\n".join(describe(err) for err in e.errors())
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
async def get_request(
    model: type[T],
    url: str,
    *,
    params: dict[str, Any] | None = None,
    api_key: str | None = None,
    client: httpx.AsyncClient | None = None,
) -> tuple[T, int, str]:
    """Async GET *url* and validate the JSON response against *model*.

    Thin wrapper over the shared async request helper with ``method="GET"``.
    Returns ``(validated_model, status_code, status_text)``.
    """
    return await _base_request_async(
        model=model, url=url, method="GET", params=params, api_key=api_key, client=client
    )
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
async def post_request(
    model: type[T],
    url: str,
    *,
    body: Any | None = None,
    api_key: str | None = None,
    client: httpx.AsyncClient | None = None,
) -> tuple[T, int, str]:
    """Async POST *body* (JSON-encoded) to *url*, validating the response as *model*.

    Returns ``(validated_model, status_code, status_text)``.
    """
    return await _base_request_async(
        model=model, url=url, method="POST", body=body, api_key=api_key, client=client
    )
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
async def patch_request(
    model: type[T],
    url: str,
    *,
    body: Any | None = None,
    api_key: str | None = None,
    client: httpx.AsyncClient | None = None,
) -> tuple[T, int, str]:
    """Async PATCH *body* (JSON-encoded) to *url*, validating the response as *model*.

    Returns ``(validated_model, status_code, status_text)``.
    """
    return await _base_request_async(
        model=model, url=url, method="PATCH", body=body, api_key=api_key, client=client
    )
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def get_request_sync(
    model: type[T],
    url: str,
    *,
    params: dict[str, Any] | None = None,
    api_key: str | None = None,
    client: httpx.Client | None = None,
) -> tuple[T, int, str]:
    """Blocking GET *url*, validating the JSON response against *model*.

    Synchronous twin of :func:`get_request`.
    """
    return _base_request_sync(
        model=model, url=url, method="GET", params=params, api_key=api_key, client=client
    )
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def post_request_sync(
    model: type[T],
    url: str,
    *,
    body: Any | None = None,
    api_key: str | None = None,
    client: httpx.Client | None = None,
) -> tuple[T, int, str]:
    """Blocking POST of *body* to *url*, validating the response as *model*.

    Synchronous twin of :func:`post_request`.
    """
    return _base_request_sync(
        model=model, url=url, method="POST", body=body, api_key=api_key, client=client
    )
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def patch_request_sync(
    model: type[T],
    url: str,
    *,
    body: Any | None = None,
    api_key: str | None = None,
    client: httpx.Client | None = None,
) -> tuple[T, int, str]:
    """Blocking PATCH of *body* to *url*, validating the response as *model*.

    Synchronous twin of :func:`patch_request`.
    """
    return _base_request_sync(
        model=model, url=url, method="PATCH", body=body, api_key=api_key, client=client
    )
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
async def _base_request_async(
    model: type[T],
    url: str,
    method: str,
    *,
    params: dict[str, Any] | None = None,
    body: Any | None = None,
    api_key: str | None = None,
    client: httpx.AsyncClient | None = None,
) -> tuple[T, int, str]:
    """Shared async HTTP path for GET/POST/PATCH helpers.

    Builds headers (JSON content type for write methods, bearer auth when an
    API key is given), issues the request — on a caller-supplied client or a
    one-shot temporary one — and delegates parsing to :func:`_process_response`.
    """
    is_write = method in ("POST", "PATCH")

    headers: dict[str, str] = {}
    if is_write:
        headers["Content-Type"] = "application/json"
    if api_key:
        headers["Authorization"] = f"Bearer {api_key}"

    # Only write methods carry a JSON body.
    json_body = body if is_write else None

    if client is None:
        # No client supplied: open a short-lived one for this single request.
        async with httpx.AsyncClient() as owned_client:
            response = await owned_client.request(
                method=method, url=url, params=params, json=json_body, headers=headers
            )
    else:
        response = await client.request(
            method=method, url=url, params=params, json=json_body, headers=headers
        )

    return _process_response(model, response)
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def _base_request_sync(
    model: type[T],
    url: str,
    method: str,
    *,
    params: dict[str, Any] | None = None,
    body: Any | None = None,
    api_key: str | None = None,
    client: httpx.Client | None = None,
) -> tuple[T, int, str]:
    """Shared blocking HTTP path; mirrors :func:`_base_request_async`.

    Builds the same headers and body handling, then runs the request on the
    caller's client or a throwaway one, and parses via :func:`_process_response`.
    """
    is_write = method in ("POST", "PATCH")

    headers: dict[str, str] = {}
    if is_write:
        headers["Content-Type"] = "application/json"
    if api_key:
        headers["Authorization"] = f"Bearer {api_key}"

    # Only write methods carry a JSON body.
    json_body = body if is_write else None

    if client is None:
        # No client supplied: open a short-lived one for this single request.
        with httpx.Client() as owned_client:
            response = owned_client.request(
                method=method, url=url, params=params, json=json_body, headers=headers
            )
    else:
        response = client.request(
            method=method, url=url, params=params, json=json_body, headers=headers
        )

    return _process_response(model, response)
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
def _process_response(model: type[T], response: httpx.Response) -> tuple[T, int, str]:
    """Turn an httpx response into ``(validated_model, status, status_text)``.

    Raises:
        FetchError: for any non-success HTTP status.
        ValueError: when the body parses as JSON but fails model validation
            (wraps the pydantic error in a readable message).
    """
    status, status_text = response.status_code, response.reason_phrase

    if not response.is_success:
        raise FetchError(response.text, status, status_text)

    try:
        # bigint markers in the payload are revived into Python ints.
        payload = json.loads(response.text, object_hook=bigint_reviver)
        return model.model_validate(payload), status, status_text
    except ValidationError as e:
        raise ValueError(prettify_validation_error(e)) from e
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
def _bcs_encode_string(s: str) -> bytes:
    """BCS-serialize *s* using the Aptos SDK serializer."""
    encoder = Serializer()
    encoder.str(s)
    return encoder.output()
|
|
298
|
+
|
|
299
|
+
|
|
300
|
+
def _get_subaccount_seed_bytes(owner_addr: AccountAddress, seed: str) -> bytes:
    """Build the derivation seed: owner address bytes + BCS-encoded seed string.

    TODO: confirm this matches the on-chain SubaccountSeed serialization exactly.
    """
    return bytes(owner_addr.address) + _bcs_encode_string(seed)
|
|
303
|
+
|
|
304
|
+
|
|
305
|
+
def get_market_addr(name: str, perp_engine_global_addr: str) -> str:
    """Derive the named-object address of market *name* under the perp engine."""
    deriving_account = AccountAddress.from_str(perp_engine_global_addr)
    seed = _bcs_encode_string(name)
    return str(AccountAddress.for_named_object(deriving_account, seed))
|
|
309
|
+
|
|
310
|
+
|
|
311
|
+
def get_primary_subaccount_addr(
    addr: AccountAddress | str,
    compat_version: CompatVersion,
    package_addr: AccountAddress | str,
) -> str:
    """Derive the primary subaccount address for *addr* under *package_addr*.

    Derivation chain: package -> "GlobalSubaccountManager" named object ->
    named object seeded with the owner address + "primary_subaccount".
    ``compat_version`` is currently unused; it is kept for interface stability.
    """
    _ = compat_version
    owner = AccountAddress.from_str(addr) if isinstance(addr, str) else addr
    pkg = (
        AccountAddress.from_str(package_addr) if isinstance(package_addr, str) else package_addr
    )

    manager = AccountAddress.for_named_object(pkg, b"GlobalSubaccountManager")
    seed = _get_subaccount_seed_bytes(owner, "primary_subaccount")
    derived = str(AccountAddress.for_named_object(manager, seed))

    logger.debug(
        "Deriving primary subaccount address for account %s, package %s, deriver %s, got: %s",
        owner,
        pkg,
        manager,
        derived,
    )
    return derived
|
|
332
|
+
|
|
333
|
+
|
|
334
|
+
def get_trading_competition_subaccount_addr(addr: AccountAddress | str) -> str:
    """Derive the trading-competition subaccount address owned by *addr*."""
    owner = AccountAddress.from_str(addr) if isinstance(addr, str) else addr
    return str(AccountAddress.for_named_object(owner, b"trading_competition"))
|
|
337
|
+
|
|
338
|
+
|
|
339
|
+
def get_vault_share_address(vault_address: str) -> str:
    """Derive the address of a vault's share asset object from the vault address."""
    vault = AccountAddress.from_str(vault_address)
    return str(AccountAddress.for_named_object(vault, b"vault_share_asset"))
|
|
342
|
+
|
|
343
|
+
|
|
344
|
+
def round_to_tick_size(price: float, tick_size: int, px_decimals: int, round_up: bool) -> float:
    """Snap *price* onto the tick grid, rounding up or down as requested.

    The price is scaled into integer price units (10**px_decimals), snapped to
    a multiple of *tick_size*, then scaled back and re-rounded to px_decimals
    to shed float noise. Zero passes through unchanged.
    """
    if price == 0:
        return 0.0
    scale = 10**px_decimals
    ticks = price * scale / tick_size
    snapped = (math.ceil(ticks) if round_up else math.floor(ticks)) * tick_size
    return round(snapped / scale, px_decimals)
|
|
353
|
+
|
|
354
|
+
|
|
355
|
+
def round_to_valid_price(price: float, tick_size: int, px_decimals: int) -> float:
    """Round *price* to the nearest valid tick (Python round-half-even).

    Zero passes through unchanged; the final round to px_decimals sheds float
    noise introduced by the scaling.
    """
    if price == 0:
        return 0.0
    scale = 10**px_decimals
    ticks = round(price * scale / tick_size)
    return round(ticks * tick_size / scale, px_decimals)
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
def round_to_valid_order_size(
    order_size: float,
    lot_size: int,
    sz_decimals: int,
    min_size: int,
) -> float:
    """Round *order_size* to the nearest lot, clamping up to the minimum size.

    Sizes below the decimal minimum (``min_size / 10**sz_decimals``) are bumped
    to exactly the minimum rather than rounded; zero passes through unchanged.
    """
    if order_size == 0:
        return 0.0

    scale = 10**sz_decimals
    smallest_valid = min_size / scale
    if order_size < smallest_valid:
        return smallest_valid

    lots = round(order_size * scale / lot_size)
    return round(lots * lot_size / scale, sz_decimals)
|
|
381
|
+
|
|
382
|
+
|
|
383
|
+
def amount_to_chain_units(amount: float, decimals: int = 6) -> int:
    """Convert a decimal amount to integer chain units (e.g. 5.67 USDC -> 5670000).

    Uses round() rather than int() truncation: binary float representation
    error makes values such as ``0.29 * 100`` come out as 28.999999999999996,
    which int() would truncate to 28 instead of the intended 29.
    """
    return round(amount * (10**decimals))
|
|
386
|
+
|
|
387
|
+
|
|
388
|
+
def chain_units_to_amount(chain_units: int, decimals: int = 6) -> float:
    """Convert integer chain units back to a decimal amount (e.g. 5670000 -> 5.67)."""
    return chain_units / 10**decimals
|
|
391
|
+
|
|
392
|
+
|
|
393
|
+
def extract_vault_address_from_create_tx(create_vault_tx: dict[str, Any]) -> str:
    """Pull the new vault's address out of a create-vault transaction payload.

    Scans the transaction's ``events`` for the first whose type contains
    ``::vault::VaultCreatedEvent`` and returns its ``data.vault`` field,
    unwrapping the ``{"inner": ...}`` object form when present.

    Raises:
        ValueError: if no usable vault address can be found.
    """
    events = create_vault_tx.get("events")
    candidate: str | dict[str, str] | None = None

    for event in events if isinstance(events, list) else []:
        if not isinstance(event, dict):
            continue
        event_type = event.get("type", "")
        if not (isinstance(event_type, str) and "::vault::VaultCreatedEvent" in event_type):
            continue
        data = event.get("data", {})
        if isinstance(data, dict):
            vault_val = data.get("vault")
            if isinstance(vault_val, (str, dict)):
                candidate = vault_val
        # Only the first matching event is considered.
        break

    if isinstance(candidate, dict) and "inner" in candidate:
        return candidate["inner"]
    if isinstance(candidate, str):
        return candidate
    raise ValueError("Unable to extract vault address from transaction")
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
def generate_random_replay_protection_nonce() -> int | None:
    """Generate a random 64-bit replay-protection nonce from two 32-bit halves.

    Both halves are guaranteed non-zero. The previous implementation returned
    None whenever either half happened to be zero (~1 in 2**31 calls), which
    would silently drop replay protection for that call; we retry instead so
    callers always receive a usable nonce. The ``int | None`` return type is
    kept for backward compatibility with existing callers.
    """
    while True:
        hi = secrets.randbits(32)
        lo = secrets.randbits(32)
        if hi and lo:
            return (hi << 32) | lo
|
decibel/_version.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__version__ = "0.1.0"
|
decibel/abi/__init__.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
from decibel.abi._registry import (
|
|
2
|
+
AbiRegistry,
|
|
3
|
+
get_abi_data,
|
|
4
|
+
get_default_abi_data,
|
|
5
|
+
)
|
|
6
|
+
from decibel.abi._types import (
|
|
7
|
+
ABIData,
|
|
8
|
+
ABIErrorEntry,
|
|
9
|
+
ABISummary,
|
|
10
|
+
MoveFunction,
|
|
11
|
+
MoveFunctionId,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
# Re-exported public surface of the decibel.abi subpackage.
__all__ = [
    "ABIData",
    "ABIErrorEntry",
    "ABISummary",
    "AbiRegistry",
    "MoveFunction",
    "MoveFunctionId",
    "get_abi_data",
    "get_default_abi_data",
]
|
decibel/abi/__main__.py
ADDED
decibel/abi/_registry.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import importlib.resources
|
|
4
|
+
import json
|
|
5
|
+
import warnings
|
|
6
|
+
from functools import lru_cache
|
|
7
|
+
from typing import TYPE_CHECKING
|
|
8
|
+
|
|
9
|
+
from decibel.abi._types import ABIData, MoveFunction, MoveFunctionId
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from importlib.abc import Traversable
|
|
13
|
+
|
|
14
|
+
__all__ = [
|
|
15
|
+
"AbiRegistry",
|
|
16
|
+
"get_abi_data",
|
|
17
|
+
"get_default_abi_data",
|
|
18
|
+
]
|
|
19
|
+
|
|
20
|
+
# Chain ids with a bundled ABI snapshot under decibel/abi/json/.
CHAIN_ID_NETNA = 208
CHAIN_ID_TESTNET = 2
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@lru_cache(maxsize=4)
def _load_abi_json(filename: str) -> ABIData:
    """Load and validate a bundled ABI JSON file (cached per filename)."""
    resource: Traversable = importlib.resources.files("decibel.abi") / "json" / filename
    with resource.open("r") as handle:
        return ABIData.model_validate(json.load(handle))
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def get_abi_data(chain_id: int | None) -> ABIData:
    """Return the ABI bundle for *chain_id*.

    Unknown chain ids fall back to the NETNA bundle with a warning rather
    than failing outright.
    """
    bundles = {CHAIN_ID_NETNA: "netna.json", CHAIN_ID_TESTNET: "testnet.json"}
    filename = bundles.get(chain_id)
    if filename is None:
        warnings.warn(
            f"Unknown chain_id {chain_id}, falling back to NETNA ABIs",
            stacklevel=2,
        )
        filename = "netna.json"
    return _load_abi_json(filename)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def get_default_abi_data() -> ABIData:
    """Return the NETNA ABI bundle, used when no chain id is known."""
    return _load_abi_json("netna.json")
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class AbiRegistry:
    """Lazy, chain-aware accessor over the bundled Move function ABIs.

    The underlying JSON bundle is only loaded on first access to ``abi_data``
    and cached on the instance thereafter.
    """

    def __init__(self, chain_id: int | None = None) -> None:
        self._chain_id = chain_id
        self._abi_data: ABIData | None = None

    @property
    def abi_data(self) -> ABIData:
        """ABI bundle for the configured chain, loaded lazily on first use."""
        if self._abi_data is None:
            self._abi_data = (
                get_default_abi_data()
                if self._chain_id is None
                else get_abi_data(self._chain_id)
            )
        return self._abi_data

    @property
    def package_address(self) -> str:
        """Address of the package the loaded ABIs belong to."""
        return self.abi_data.package_address

    @property
    def modules(self) -> list[str]:
        """Names of all modules covered by the loaded ABI bundle."""
        return self.abi_data.modules

    def get_function(self, function_id: MoveFunctionId) -> MoveFunction | None:
        """Look up one function's ABI, or None when the id is unknown."""
        return self.abi_data.abis.get(function_id)

    def get_all_functions(self) -> dict[MoveFunctionId, MoveFunction]:
        """Every known function ABI keyed by fully qualified id."""
        return self.abi_data.abis

    def get_entry_functions(self) -> dict[MoveFunctionId, MoveFunction]:
        """Only the ABIs flagged as entry functions."""
        abis = self.abi_data.abis
        return {fid: fn for fid, fn in abis.items() if fn.is_entry}

    def get_view_functions(self) -> dict[MoveFunctionId, MoveFunction]:
        """Only the ABIs flagged as view functions."""
        abis = self.abi_data.abis
        return {fid: fn for fid, fn in abis.items() if fn.is_view}

    def get_module_functions(self, module_name: str) -> dict[MoveFunctionId, MoveFunction]:
        """All function ABIs whose id contains ``::<module_name>::``."""
        needle = f"::{module_name}::"
        return {fid: fn for fid, fn in self.abi_data.abis.items() if needle in fid}

    def has_function(self, function_id: MoveFunctionId) -> bool:
        """Whether *function_id* is present in the loaded ABIs."""
        return function_id in self.abi_data.abis
|
decibel/abi/_types.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from pydantic import BaseModel, ConfigDict, Field
|
|
4
|
+
|
|
5
|
+
# Public surface of this module.
__all__ = [
    "ABIData",
    "ABIErrorEntry",
    "ABISummary",
    "MoveFunction",
    "MoveFunctionId",
]

# Fully qualified Move function id; get_module_functions matches the
# "::<module>::" segment inside it.
MoveFunctionId = str
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class MoveFunction(BaseModel):
    """ABI metadata for a single Move function."""

    model_config = ConfigDict(populate_by_name=True)

    name: str
    visibility: str
    # True when the function can be submitted as a transaction entry point.
    is_entry: bool
    # True when the function is a read-only view function.
    is_view: bool
    generic_type_params: list[dict[str, object]]
    # Move type strings of the parameters.
    params: list[str]
    # "return" is a Python keyword, so the field is aliased.
    return_: list[str] = Field(alias="return")
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class ABIErrorEntry(BaseModel):
    """An error associated with one module/function pair in the ABI bundle."""

    model_config = ConfigDict(populate_by_name=True)

    module: str
    function: str
    # Description of what went wrong for this entry.
    error: str
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class ABISummary(BaseModel):
    """Aggregate counts for an ABI bundle (serialized with camelCase aliases)."""

    model_config = ConfigDict(populate_by_name=True)

    total_modules: int = Field(alias="totalModules")
    total_functions: int = Field(alias="totalFunctions")
    successful: int
    failed: int
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class ABIData(BaseModel):
    """Top-level ABI bundle: package metadata plus all function ABIs.

    Loaded from the JSON snapshots shipped under ``decibel/abi/json/``.
    """

    model_config = ConfigDict(populate_by_name=True)

    package_address: str = Field(alias="packageAddress")
    network: str
    fullnode_url: str = Field(alias="fullnodeUrl")
    # Timestamp of when the ABI snapshot was taken — presumably ISO-8601;
    # NOTE(review): confirm against the generator that produces these files.
    fetched_at: str = Field(alias="fetchedAt")
    # Function ABIs keyed by fully qualified Move function id.
    abis: dict[MoveFunctionId, MoveFunction]
    errors: list[ABIErrorEntry]
    summary: ABISummary
    modules: list[str]
|