bittensor-cli 8.4.3__py3-none-any.whl → 9.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. bittensor_cli/__init__.py +1 -1
  2. bittensor_cli/cli.py +1827 -1392
  3. bittensor_cli/src/__init__.py +623 -168
  4. bittensor_cli/src/bittensor/balances.py +41 -8
  5. bittensor_cli/src/bittensor/chain_data.py +557 -428
  6. bittensor_cli/src/bittensor/extrinsics/registration.py +129 -23
  7. bittensor_cli/src/bittensor/extrinsics/root.py +3 -3
  8. bittensor_cli/src/bittensor/extrinsics/transfer.py +6 -11
  9. bittensor_cli/src/bittensor/minigraph.py +46 -8
  10. bittensor_cli/src/bittensor/subtensor_interface.py +567 -250
  11. bittensor_cli/src/bittensor/utils.py +399 -25
  12. bittensor_cli/src/commands/stake/__init__.py +154 -0
  13. bittensor_cli/src/commands/stake/add.py +625 -0
  14. bittensor_cli/src/commands/stake/children_hotkeys.py +103 -75
  15. bittensor_cli/src/commands/stake/list.py +687 -0
  16. bittensor_cli/src/commands/stake/move.py +1000 -0
  17. bittensor_cli/src/commands/stake/remove.py +1146 -0
  18. bittensor_cli/src/commands/subnets/__init__.py +0 -0
  19. bittensor_cli/src/commands/subnets/price.py +867 -0
  20. bittensor_cli/src/commands/subnets/subnets.py +2028 -0
  21. bittensor_cli/src/commands/sudo.py +554 -12
  22. bittensor_cli/src/commands/wallets.py +225 -531
  23. bittensor_cli/src/commands/weights.py +2 -2
  24. {bittensor_cli-8.4.3.dist-info → bittensor_cli-9.0.0.dist-info}/METADATA +7 -4
  25. bittensor_cli-9.0.0.dist-info/RECORD +34 -0
  26. bittensor_cli/src/bittensor/async_substrate_interface.py +0 -2748
  27. bittensor_cli/src/commands/root.py +0 -1752
  28. bittensor_cli/src/commands/stake/stake.py +0 -1448
  29. bittensor_cli/src/commands/subnets.py +0 -897
  30. bittensor_cli-8.4.3.dist-info/RECORD +0 -31
  31. {bittensor_cli-8.4.3.dist-info → bittensor_cli-9.0.0.dist-info}/WHEEL +0 -0
  32. {bittensor_cli-8.4.3.dist-info → bittensor_cli-9.0.0.dist-info}/entry_points.txt +0 -0
  33. {bittensor_cli-8.4.3.dist-info → bittensor_cli-9.0.0.dist-info}/top_level.txt +0 -0
@@ -1,2748 +0,0 @@
1
- import asyncio
2
- import json
3
- import random
4
- from collections import defaultdict
5
- from dataclasses import dataclass
6
- from hashlib import blake2b
7
- from typing import Optional, Any, Union, Callable, Awaitable, cast, TYPE_CHECKING
8
-
9
- from async_property import async_property
10
- from bt_decode import PortableRegistry, decode as decode_by_type_string, MetadataV15
11
- from bittensor_wallet import Keypair
12
- from scalecodec import GenericExtrinsic
13
- from scalecodec.base import ScaleBytes, ScaleType, RuntimeConfigurationObject
14
- from scalecodec.type_registry import load_type_registry_preset
15
- from scalecodec.types import GenericCall
16
- from substrateinterface.exceptions import (
17
- SubstrateRequestException,
18
- ExtrinsicNotFound,
19
- BlockNotFound,
20
- )
21
- from substrateinterface.storage import StorageKey
22
- from websockets.asyncio.client import connect
23
- from websockets.exceptions import ConnectionClosed
24
-
25
- from bittensor_cli.src.bittensor.utils import hex_to_bytes
26
-
27
- if TYPE_CHECKING:
28
- from websockets.asyncio.client import ClientConnection
29
-
30
# Signature for async callbacks that post-process an RPC response:
# takes (response dict, update handle) and returns (processed dict, done flag).
ResultHandler = Callable[[dict, Any], Awaitable[tuple[dict, bool]]]
31
-
32
-
33
class TimeoutException(Exception):
    """Raised by timeout_handler when a watched operation exceeds its time limit."""

    pass
35
-
36
-
37
def timeout_handler(signum, frame):
    # Signal-handler style callback (signum, frame): aborts the in-flight
    # operation by raising TimeoutException.
    raise TimeoutException("Operation timed out")
39
-
40
-
41
class ExtrinsicReceipt:
    """
    Object containing information of submitted extrinsic. Block hash where extrinsic is included is required
    when retrieving triggered events or determine if extrinsic was successful
    """

    def __init__(
        self,
        substrate: "AsyncSubstrateInterface",
        extrinsic_hash: Optional[str] = None,
        block_hash: Optional[str] = None,
        block_number: Optional[int] = None,
        extrinsic_idx: Optional[int] = None,
        finalized=None,
    ):
        """
        Object containing information of submitted extrinsic. Block hash where extrinsic is included is required
        when retrieving triggered events or determine if extrinsic was successful

        Parameters
        ----------
        substrate
        extrinsic_hash
        block_hash
        finalized
        """
        self.substrate = substrate
        self.extrinsic_hash = extrinsic_hash
        self.block_hash = block_hash
        self.block_number = block_number
        self.finalized = finalized

        # Lazily-resolved state: populated on demand by retrieve_extrinsic()
        # and process_events(), then cached for subsequent property accesses.
        self.__extrinsic_idx = extrinsic_idx
        self.__extrinsic = None

        self.__triggered_events: Optional[list] = None
        self.__is_success: Optional[bool] = None
        self.__error_message = None
        self.__weight = None
        self.__total_fee_amount = None

    async def get_extrinsic_identifier(self) -> str:
        """
        Returns the on-chain identifier for this extrinsic in format "[block_number]-[extrinsic_idx]" e.g. 134324-2
        Returns
        -------
        str
        """
        if self.block_number is None:
            if self.block_hash is None:
                raise ValueError(
                    "Cannot create extrinsic identifier: block_hash is not set"
                )

            # Resolve and cache the block number from the known block hash.
            self.block_number = await self.substrate.get_block_number(self.block_hash)

            if self.block_number is None:
                raise ValueError(
                    "Cannot create extrinsic identifier: unknown block_hash"
                )

        return f"{self.block_number}-{await self.extrinsic_idx}"

    async def retrieve_extrinsic(self):
        """Locate this extrinsic inside its block and cache its index and body."""
        if not self.block_hash:
            raise ValueError(
                "ExtrinsicReceipt can't retrieve events because it's unknown which block_hash it is "
                "included, manually set block_hash or use `wait_for_inclusion` when sending extrinsic"
            )
        # Determine extrinsic idx

        block = await self.substrate.get_block(block_hash=self.block_hash)

        extrinsics = block["extrinsics"]

        if len(extrinsics) > 0:
            if self.__extrinsic_idx is None:
                self.__extrinsic_idx = self.__get_extrinsic_index(
                    block_extrinsics=extrinsics, extrinsic_hash=self.extrinsic_hash
                )

            # An externally supplied index may be out of range for this block.
            if self.__extrinsic_idx >= len(extrinsics):
                raise ExtrinsicNotFound()

            self.__extrinsic = extrinsics[self.__extrinsic_idx]

    @async_property
    async def extrinsic_idx(self) -> int:
        """
        Retrieves the index of this extrinsic in containing block

        Returns
        -------
        int
        """
        if self.__extrinsic_idx is None:
            await self.retrieve_extrinsic()
        return self.__extrinsic_idx

    @async_property
    async def triggered_events(self) -> list:
        """
        Gets triggered events for submitted extrinsic. block_hash where extrinsic is included is required, manually
        set block_hash or use `wait_for_inclusion` when submitting extrinsic

        Returns
        -------
        list
        """
        if self.__triggered_events is None:
            if not self.block_hash:
                raise ValueError(
                    "ExtrinsicReceipt can't retrieve events because it's unknown which block_hash it is "
                    "included, manually set block_hash or use `wait_for_inclusion` when sending extrinsic"
                )

            # NOTE(review): extrinsic_idx itself calls retrieve_extrinsic()
            # when unset, so this second call looks redundant — confirm.
            if await self.extrinsic_idx is None:
                await self.retrieve_extrinsic()

            self.__triggered_events = []

            # Keep only the block events attributed to this extrinsic's index.
            for event in await self.substrate.get_events(block_hash=self.block_hash):
                if event["extrinsic_idx"] == await self.extrinsic_idx:
                    self.__triggered_events.append(event)

        return cast(list, self.__triggered_events)

    async def process_events(self):
        """Scan triggered events to derive success/failure, weight, error and fees."""
        if await self.triggered_events:
            self.__total_fee_amount = 0

            # Process fees
            has_transaction_fee_paid_event = False

            for event in await self.triggered_events:
                if (
                    event["event"]["module_id"] == "TransactionPayment"
                    and event["event"]["event_id"] == "TransactionFeePaid"
                ):
                    self.__total_fee_amount = event["event"]["attributes"]["actual_fee"]
                    has_transaction_fee_paid_event = True

            # Process other events
            for event in await self.triggered_events:
                # Check events
                if (
                    event["event"]["module_id"] == "System"
                    and event["event"]["event_id"] == "ExtrinsicSuccess"
                ):
                    self.__is_success = True
                    self.__error_message = None

                    if "dispatch_info" in event["event"]["attributes"]:
                        self.__weight = event["event"]["attributes"]["dispatch_info"][
                            "weight"
                        ]
                    else:
                        # Backwards compatibility
                        self.__weight = event["event"]["attributes"]["weight"]

                elif (
                    event["event"]["module_id"] == "System"
                    and event["event"]["event_id"] == "ExtrinsicFailed"
                ):
                    self.__is_success = False

                    dispatch_info = event["event"]["attributes"]["dispatch_info"]
                    dispatch_error = event["event"]["attributes"]["dispatch_error"]

                    self.__weight = dispatch_info["weight"]

                    if "Module" in dispatch_error:
                        module_index = dispatch_error["Module"][0]["index"]
                        error_index = int.from_bytes(
                            bytes(dispatch_error["Module"][0]["error"]),
                            byteorder="little",
                            signed=False,
                        )

                        # NOTE(review): int.from_bytes always returns int, so
                        # this str branch appears unreachable as written.
                        if isinstance(error_index, str):
                            # Actual error index is first u8 in new [u8; 4] format
                            error_index = int(error_index[2:4], 16)
                        module_error = self.substrate.metadata.get_module_error(
                            module_index=module_index, error_index=error_index
                        )
                        self.__error_message = {
                            "type": "Module",
                            "name": module_error.name,
                            "docs": module_error.docs,
                        }
                    elif "BadOrigin" in dispatch_error:
                        self.__error_message = {
                            "type": "System",
                            "name": "BadOrigin",
                            "docs": "Bad origin",
                        }
                    elif "CannotLookup" in dispatch_error:
                        self.__error_message = {
                            "type": "System",
                            "name": "CannotLookup",
                            "docs": "Cannot lookup",
                        }
                    elif "Other" in dispatch_error:
                        self.__error_message = {
                            "type": "System",
                            "name": "Other",
                            "docs": "Unspecified error occurred",
                        }

                # Fallback fee accounting for runtimes without TransactionFeePaid.
                elif not has_transaction_fee_paid_event:
                    if (
                        event["event"]["module_id"] == "Treasury"
                        and event["event"]["event_id"] == "Deposit"
                    ):
                        self.__total_fee_amount += event["event"]["attributes"]["value"]
                    elif (
                        event["event"]["module_id"] == "Balances"
                        and event["event"]["event_id"] == "Deposit"
                    ):
                        # NOTE(review): every other branch indexes
                        # event["event"]; this one uses event.value — confirm
                        # the event object exposes a .value attribute here.
                        self.__total_fee_amount += event.value["attributes"]["amount"]

    @async_property
    async def is_success(self) -> bool:
        """
        Returns `True` if `ExtrinsicSuccess` event is triggered, `False` in case of `ExtrinsicFailed`
        In case of False `error_message` will contain more details about the error


        Returns
        -------
        bool
        """
        if self.__is_success is None:
            await self.process_events()

        return cast(bool, self.__is_success)

    @async_property
    async def error_message(self) -> Optional[dict]:
        """
        Returns the error message if the extrinsic failed in format e.g.:

        `{'type': 'System', 'name': 'BadOrigin', 'docs': 'Bad origin'}`

        Returns
        -------
        dict
        """
        if self.__error_message is None:
            if await self.is_success:
                return None
            await self.process_events()
        return self.__error_message

    @async_property
    async def weight(self) -> Union[int, dict]:
        """
        Contains the actual weight when executing this extrinsic

        Returns
        -------
        int (WeightV1) or dict (WeightV2)
        """
        if self.__weight is None:
            await self.process_events()
        return self.__weight

    @async_property
    async def total_fee_amount(self) -> int:
        """
        Contains the total fee costs deducted when executing this extrinsic. This includes fee for the validator (
        (`Balances.Deposit` event) and the fee deposited for the treasury (`Treasury.Deposit` event)

        Returns
        -------
        int
        """
        if self.__total_fee_amount is None:
            await self.process_events()
        return cast(int, self.__total_fee_amount)

    # Helper functions
    @staticmethod
    def __get_extrinsic_index(block_extrinsics: list, extrinsic_hash: str) -> int:
        """
        Returns the index of a provided extrinsic
        """
        for idx, extrinsic in enumerate(block_extrinsics):
            if (
                extrinsic.extrinsic_hash
                and f"0x{extrinsic.extrinsic_hash.hex()}" == extrinsic_hash
            ):
                return idx
        raise ExtrinsicNotFound()

    # Backwards compatibility methods
    def __getitem__(self, item):
        # Dict-style access maps onto attribute access.
        return getattr(self, item)

    def __iter__(self):
        # Yields (attribute_name, value) pairs; private attributes appear
        # under their name-mangled keys.
        for item in self.__dict__.items():
            yield item

    def get(self, name):
        return self[name]
346
-
347
-
348
class QueryMapResult:
    """
    Lazily-paged result of a storage-map query. Iterating asynchronously
    yields the initial `records` first, then transparently fetches further
    pages via substrate.query_map using `last_key` as the offset.
    """

    def __init__(
        self,
        records: list,
        page_size: int,
        substrate: "AsyncSubstrateInterface",
        module: Optional[str] = None,
        storage_function: Optional[str] = None,
        params: Optional[list] = None,
        block_hash: Optional[str] = None,
        last_key: Optional[str] = None,
        max_results: Optional[int] = None,
        ignore_decoding_errors: bool = False,
    ):
        self.records = records
        self.page_size = page_size
        self.module = module
        self.storage_function = storage_function
        self.block_hash = block_hash
        self.substrate = substrate
        self.last_key = last_key
        self.max_results = max_results
        self.params = params
        self.ignore_decoding_errors = ignore_decoding_errors
        # Set once a page fetch comes back empty; stops further fetching.
        self.loading_complete = False
        self._buffer = iter(self.records)  # Initialize the buffer with initial records

    async def retrieve_next_page(self, start_key) -> list:
        """Fetch the next page of results starting after `start_key`."""
        result = await self.substrate.query_map(
            module=self.module,
            storage_function=self.storage_function,
            params=self.params,
            page_size=self.page_size,
            block_hash=self.block_hash,
            start_key=start_key,
            max_results=self.max_results,
            ignore_decoding_errors=self.ignore_decoding_errors,
        )

        # Update last key from new result set to use as offset for next page
        self.last_key = result.last_key
        return result.records

    def __aiter__(self):
        return self

    async def __anext__(self):
        try:
            # Try to get the next record from the buffer
            return next(self._buffer)
        except StopIteration:
            # If no more records in the buffer, try to fetch the next page
            if self.loading_complete:
                raise StopAsyncIteration

            next_page = await self.retrieve_next_page(self.last_key)
            if not next_page:
                self.loading_complete = True
                raise StopAsyncIteration

            # Update the buffer with the newly fetched records
            self._buffer = iter(next_page)
            return next(self._buffer)

    def __getitem__(self, item):
        # Indexing only sees the records materialized so far.
        return self.records[item]
414
-
415
-
416
@dataclass
class Preprocessed:
    """Preprocessed form of a storage query, ready to be sent as an RPC request."""

    # Identifier for the queried item (presumably used to correlate the
    # response back to the query — confirm against the caller).
    queryable: str
    # RPC method name.
    method: str
    # Parameters for the RPC call.
    params: list
    # SCALE type string used to decode the returned value.
    value_scale_type: str
    # Storage-function metadata item this query targets.
    storage_item: ScaleType
423
-
424
-
425
class RuntimeCache:
    """In-memory cache of Runtime objects, keyed by block number and by block hash."""

    blocks: dict[int, "Runtime"]
    block_hashes: dict[str, "Runtime"]

    def __init__(self):
        self.blocks = {}
        self.block_hashes = {}

    def add_item(
        self, block: Optional[int], block_hash: Optional[str], runtime: "Runtime"
    ):
        """Store `runtime` under whichever of `block` / `block_hash` are provided."""
        if block is not None:
            self.blocks[block] = runtime
        if block_hash is not None:
            self.block_hashes[block_hash] = runtime

    def retrieve(
        self, block: Optional[int] = None, block_hash: Optional[str] = None
    ) -> Optional["Runtime"]:
        """Look up a cached runtime; a block number takes precedence over a hash."""
        if block is not None:
            return self.blocks.get(block)
        if block_hash is not None:
            return self.block_hashes.get(block_hash)
        return None
450
-
451
-
452
class Runtime:
    """
    Holds the chain runtime state (metadata, type registry, runtime
    configuration) used to encode/decode SCALE data for a given chain.
    """

    block_hash: str
    block_id: int
    runtime_version = None
    transaction_version = None
    cache_region = None
    metadata = None
    type_registry_preset = None

    def __init__(self, chain, runtime_config, metadata, type_registry):
        """
        :param chain: chain name (used for preset auto-discovery and __str__)
        :param runtime_config: the RuntimeConfigurationObject to use
        :param metadata: decoded runtime metadata (may be None)
        :param type_registry: custom type registry dict (may be None)
        """
        self.config = {}
        self.chain = chain
        self.type_registry = type_registry
        # Fix: the original constructed a throwaway RuntimeConfigurationObject()
        # here and immediately overwrote it with the injected `runtime_config`;
        # the dead allocation has been removed.
        self.runtime_config = runtime_config
        self.metadata = metadata

    def __str__(self):
        return f"Runtime: {self.chain} | {self.config}"

    @property
    def implements_scaleinfo(self) -> bool:
        """
        Returns True if current runtime implementation a `PortableRegistry` (`MetadataV14` and higher)
        """
        if self.metadata:
            return self.metadata.portable_registry is not None
        else:
            return False

    def reload_type_registry(
        self, use_remote_preset: bool = True, auto_discover: bool = True
    ):
        """
        Reload type registry and preset used to instantiate the SubstrateInterface object. Useful to periodically apply
        changes in type definitions when a runtime upgrade occurred

        Parameters
        ----------
        use_remote_preset: When True preset is downloaded from Github master, otherwise use files from local installed
        scalecodec package
        auto_discover

        Returns
        -------

        """
        self.runtime_config.clear_type_registry()

        self.runtime_config.implements_scale_info = self.implements_scaleinfo

        # Load metadata types in runtime configuration
        self.runtime_config.update_type_registry(load_type_registry_preset(name="core"))
        self.apply_type_registry_presets(
            use_remote_preset=use_remote_preset, auto_discover=auto_discover
        )

    def apply_type_registry_presets(
        self,
        use_remote_preset: bool = True,
        auto_discover: bool = True,
    ):
        """
        Applies type registry presets to the runtime
        :param use_remote_preset: bool, whether to use presets from remote
        :param auto_discover: bool, whether to use presets from local installed scalecodec package
        """
        if self.type_registry_preset is not None:
            # Load type registry according to preset
            type_registry_preset_dict = load_type_registry_preset(
                name=self.type_registry_preset, use_remote_preset=use_remote_preset
            )

            if not type_registry_preset_dict:
                raise ValueError(
                    f"Type registry preset '{self.type_registry_preset}' not found"
                )

        elif auto_discover:
            # Try to auto discover type registry preset by chain name
            type_registry_name = self.chain.lower().replace(" ", "-")
            try:
                type_registry_preset_dict = load_type_registry_preset(
                    type_registry_name
                )
                self.type_registry_preset = type_registry_name
            except ValueError:
                type_registry_preset_dict = None

        else:
            type_registry_preset_dict = None

        if type_registry_preset_dict:
            # Load type registries in runtime configuration
            if self.implements_scaleinfo is False:
                # Only runtime with no embedded types in metadata need the default set of explicit defined types
                self.runtime_config.update_type_registry(
                    load_type_registry_preset(
                        "legacy", use_remote_preset=use_remote_preset
                    )
                )

            if self.type_registry_preset != "legacy":
                self.runtime_config.update_type_registry(type_registry_preset_dict)

        if self.type_registry:
            # Load type registries in runtime configuration
            self.runtime_config.update_type_registry(self.type_registry)
560
-
561
-
562
class RequestManager:
    """
    Bookkeeping for a batch of RPC payloads: maps outgoing request ids to
    their (possibly multipart) responses and reports when the batch is done.
    """

    RequestResults = dict[Union[str, int], list[Union[ScaleType, dict]]]

    def __init__(self, payloads):
        # item_id -> request_id (or a subscription id after overwrite_request)
        self.response_map = {}
        # request_id -> {"complete": bool, "results": [responses...]}
        self.responses = defaultdict(lambda: {"complete": False, "results": []})
        self.payloads_count = len(payloads)

    def add_request(self, item_id: int, request_id: Any):
        """
        Adds an outgoing request to the responses map for later retrieval
        """
        self.response_map[item_id] = request_id

    def overwrite_request(self, item_id: int, request_id: Any):
        """
        Overwrites an existing request in the responses map with a new request_id. This is used
        for multipart responses that generate a subscription id we need to watch, rather than the initial
        request_id.
        """
        self.response_map[request_id] = self.response_map.pop(item_id)
        return request_id

    def add_response(self, item_id: int, response: dict, complete: bool):
        """
        Maps a response to the request for later retrieval
        """
        mapped_id = self.response_map[item_id]
        entry = self.responses[mapped_id]
        entry["results"].append(response)
        entry["complete"] = complete

    @property
    def is_complete(self) -> bool:
        """
        Returns whether all requests in the manager have completed
        """
        if len(self.responses) != self.payloads_count:
            return False
        return all(entry["complete"] for entry in self.responses.values())

    def get_results(self) -> RequestResults:
        """
        Generates a dictionary mapping the requests initiated to the responses received.
        """
        collected = {}
        for request_id, entry in self.responses.items():
            collected[request_id] = entry["results"]
        return collected
610
-
611
-
612
class Websocket:
    def __init__(
        self,
        ws_url: str,
        max_subscriptions=1024,
        max_connections=100,
        shutdown_timer=5,
        options: Optional[dict] = None,
    ):
        """
        Websocket manager object. Allows for the use of a single websocket connection by multiple
        calls.

        :param ws_url: Websocket URL to connect to
        :param max_subscriptions: Maximum number of subscriptions per websocket connection
        :param max_connections: Maximum number of connections total
        :param shutdown_timer: Number of seconds to shut down websocket connection after last use
        """
        # TODO allow setting max concurrent connections and rpc subscriptions per connection
        # TODO reconnection logic
        self.ws_url = ws_url
        self.ws: Optional["ClientConnection"] = None
        # Monotonically increasing request id; reset when the last user exits.
        self.id = 0
        self.max_subscriptions = max_subscriptions
        self.max_connections = max_connections
        self.shutdown_timer = shutdown_timer
        # Responses received but not yet claimed, keyed by request/subscription id.
        self._received = {}
        # Reference count of concurrent `async with` users of this connection.
        self._in_use = 0
        self._receiving_task = None
        self._attempts = 0
        self._initialized = False
        self._lock = asyncio.Lock()
        # Pending delayed-shutdown task, if any (see _exit_with_timer).
        self._exit_task = None
        self._open_subscriptions = 0
        self._options = options if options else {}

    async def __aenter__(self):
        # First entrant opens the connection and starts the receiver task;
        # subsequent entrants just bump the refcount and cancel any pending
        # delayed shutdown.
        async with self._lock:
            self._in_use += 1
            if self._exit_task:
                self._exit_task.cancel()
            if not self._initialized:
                self._initialized = True
                self.ws = await asyncio.wait_for(
                    connect(self.ws_url, **self._options), timeout=10
                )
                self._receiving_task = asyncio.create_task(self._start_receiving())
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        async with self._lock:
            self._in_use -= 1
            if self._exit_task is not None:
                self._exit_task.cancel()
                try:
                    await self._exit_task
                except asyncio.CancelledError:
                    pass
            if self._in_use == 0 and self.ws is not None:
                self.id = 0
                self._open_subscriptions = 0
                # Keep the socket warm for shutdown_timer seconds in case of reuse.
                self._exit_task = asyncio.create_task(self._exit_with_timer())

    async def _exit_with_timer(self):
        """
        Allows for graceful shutdown of websocket connection after specified number of seconds, allowing
        for reuse of the websocket connection.
        """
        try:
            await asyncio.sleep(self.shutdown_timer)
            await self.shutdown()
        except asyncio.CancelledError:
            pass

    async def shutdown(self):
        # Tear down the receiver task and close the socket; AttributeError is
        # swallowed so this is safe to call when never connected.
        async with self._lock:
            try:
                self._receiving_task.cancel()
                await self._receiving_task
                await self.ws.close()
            except (AttributeError, asyncio.CancelledError):
                pass
            self.ws = None
            self._initialized = False
            self._receiving_task = None
            self.id = 0

    async def _recv(self) -> None:
        # Receive one message and file it under its request id (plain
        # responses) or subscription id (subscription updates).
        try:
            response = json.loads(await self.ws.recv())
            async with self._lock:
                # NOTE(review): decremented for every received message, not
                # only subscription teardowns — confirm intended accounting.
                self._open_subscriptions -= 1
                if "id" in response:
                    self._received[response["id"]] = response
                elif "params" in response:
                    self._received[response["params"]["subscription"]] = response
                else:
                    raise KeyError(response)
        except ConnectionClosed:
            raise
        except KeyError as e:
            raise e

    async def _start_receiving(self):
        # Receiver loop: runs until cancelled (normal shutdown path) or the
        # connection drops.
        try:
            while True:
                await self._recv()
        except asyncio.CancelledError:
            pass
        except ConnectionClosed:
            # TODO try reconnect, but only if it's needed
            raise

    async def send(self, payload: dict) -> int:
        """
        Sends a payload to the websocket connection.

        :param payload: payload, generate a payload with the AsyncSubstrateInterface.make_payload method

        :return: the id assigned to this request (used to retrieve the response)
        """
        async with self._lock:
            original_id = self.id
            self.id += 1
            self._open_subscriptions += 1
        try:
            await self.ws.send(json.dumps({**payload, **{"id": original_id}}))
            return original_id
        except ConnectionClosed:
            raise

    async def retrieve(self, item_id: int) -> Optional[dict]:
        """
        Retrieves a single item from received responses dict queue

        :param item_id: id of the item to retrieve

        :return: retrieved item
        """
        # Polls every 100ms until the receiver task has filed the response.
        while True:
            async with self._lock:
                if item_id in self._received:
                    return self._received.pop(item_id)
            await asyncio.sleep(0.1)
754
-
755
-
756
- class AsyncSubstrateInterface:
757
- runtime = None
758
- registry: Optional[PortableRegistry] = None
759
-
760
    def __init__(
        self,
        chain_endpoint: str,
        use_remote_preset=False,
        auto_discover=True,
        auto_reconnect=True,
        ss58_format=None,
        type_registry=None,
        chain_name=None,
    ):
        """
        The asyncio-compatible version of the subtensor interface commands we use in bittensor

        :param chain_endpoint: websocket URL of the chain node
        :param chain_name: optional chain name; when None it is discovered
            via the `system_chain` RPC during initialize()
        """
        self.chain_endpoint = chain_endpoint
        self.__chain = chain_name
        # Single shared websocket manager for all RPC traffic.
        self.ws = Websocket(
            chain_endpoint,
            options={
                "max_size": 2**32,
                "write_limit": 2**16,
            },
        )
        self._lock = asyncio.Lock()
        self.last_block_hash: Optional[str] = None
        self.config = {
            "use_remote_preset": use_remote_preset,
            "auto_discover": auto_discover,
            "auto_reconnect": auto_reconnect,
            "rpc_methods": None,
            "strict_scale_decode": True,
        }
        self.initialized = False
        self._forgettable_task = None
        self.ss58_format = ss58_format
        self.type_registry = type_registry
        self.runtime_cache = RuntimeCache()
        self.block_id: Optional[int] = None
        self.runtime_version = None
        self.runtime_config = RuntimeConfigurationObject()
        # runtime_version -> decoded metadata, filled by init_runtime.
        self.__metadata_cache = {}
        self.type_registry_preset = None
        self.transaction_version = None
        self.metadata = None
        self.metadata_version_hex = "0x0f000000"  # v15
804
-
805
- async def __aenter__(self):
806
- await self.initialize()
807
-
808
    async def initialize(self):
        """
        Initialize the connection to the chain.

        Idempotent: guarded by `self.initialized` under the instance lock.
        Discovers the chain name via the `system_chain` RPC when it was not
        supplied to the constructor, then loads the type registry, the
        bt-decode registry, and the runtime concurrently.
        """
        async with self._lock:
            if not self.initialized:
                if not self.__chain:
                    chain = await self.rpc_request("system_chain", [])
                    self.__chain = chain.get("result")
                self.reload_type_registry()
                await asyncio.gather(self.load_registry(), self.init_runtime(None))
                self.initialized = True
820
-
821
    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Intentionally no teardown here — presumably the underlying Websocket
        # manages its own connection lifetime (see Websocket._exit_with_timer);
        # confirm before adding cleanup.
        pass
823
-
824
    @property
    def chain(self):
        """
        Returns the substrate chain currently associated with object

        Either the `chain_name` given to the constructor or the value
        discovered via the `system_chain` RPC in initialize().
        """
        return self.__chain
830
-
831
- async def get_storage_item(self, module: str, storage_function: str):
832
- if not self.metadata:
833
- await self.init_runtime()
834
- metadata_pallet = self.metadata.get_metadata_pallet(module)
835
- storage_item = metadata_pallet.get_storage_function(storage_function)
836
- return storage_item
837
-
838
- async def _get_current_block_hash(
839
- self, block_hash: Optional[str], reuse: bool
840
- ) -> Optional[str]:
841
- if block_hash:
842
- self.last_block_hash = block_hash
843
- return block_hash
844
- elif reuse:
845
- if self.last_block_hash:
846
- return self.last_block_hash
847
- return block_hash
848
-
849
    async def load_registry(self):
        # Fetch raw runtime metadata (version selected by
        # self.metadata_version_hex, v15) via the `state_call` RPC and build
        # the bt-decode PortableRegistry used by decode_scale().
        metadata_rpc_result = await self.rpc_request(
            "state_call",
            ["Metadata_metadata_at_version", self.metadata_version_hex],
        )
        # Result is a 0x-prefixed hex string wrapping an Option<metadata> blob.
        metadata_option_hex_str = metadata_rpc_result["result"]
        metadata_option_bytes = bytes.fromhex(metadata_option_hex_str[2:])
        metadata_v15 = MetadataV15.decode_from_metadata_option(metadata_option_bytes)
        self.registry = PortableRegistry.from_metadata_v15(metadata_v15)
858
-
859
    async def decode_scale(
        self, type_string, scale_bytes: bytes, return_scale_obj=False
    ):
        """
        Helper function to decode arbitrary SCALE-bytes (e.g. 0x02000000) according to given RUST type_string
        (e.g. BlockNumber), using the registry loaded by load_registry().

        Parameters
        ----------
        type_string: RUST type string to decode against
        scale_bytes: raw SCALE-encoded bytes; the single byte b"\\x00" is
            treated as "no value" and decodes to None
        return_scale_obj: NOTE(review): currently ignored — the decoded
            (serialized) value is always returned, never a SCALE object

        Returns
        -------
        The decoded value, or None for b"\\x00" input.
        """
        if scale_bytes == b"\x00":
            obj = None
        else:
            obj = decode_by_type_string(type_string, self.registry, scale_bytes)
        return obj
883
-
884
    async def init_runtime(
        self, block_hash: Optional[str] = None, block_id: Optional[int] = None
    ) -> Runtime:
        """
        This method is used by all other methods that deal with metadata and types defined in the type registry.
        It optionally retrieves the block_hash when block_id is given and sets the applicable metadata for that
        block_hash. Also, it applies all the versioned types at the time of the block_hash.

        Because parsing of metadata and type registry is quite heavy, the result will be cached per runtime id.
        In the future there could be support for caching backends like Redis to make this cache more persistent.

        :param block_hash: optional block hash, should not be specified if block_id is
        :param block_id: optional block id, should not be specified if block_hash is

        :returns: Runtime object

        :raises ValueError: if both block_hash and block_id are supplied
        :raises SubstrateRequestException: if the block or its runtime info cannot be found
        """

        async def get_runtime(block_hash, block_id) -> Runtime:
            # Fast path: runtime state already set to the requested block.
            if (
                (block_hash and block_hash == self.last_block_hash)
                or (block_id and block_id == self.block_id)
            ) and self.metadata is not None:
                return Runtime(
                    self.chain,
                    self.runtime_config,
                    self.metadata,
                    self.type_registry,
                )

            # Resolve the target block hash (block_id wins if given; chain tip otherwise).
            if block_id is not None:
                block_hash = await self.get_block_hash(block_id)

            if not block_hash:
                block_hash = await self.get_chain_head()

            self.last_block_hash = block_hash
            self.block_id = block_id

            # Calls and storage functions are decoded against the runtime of the previous
            # block, therefore retrieve metadata and apply the type registry of the
            # runtime of the parent block.
            block_header = await self.rpc_request(
                "chain_getHeader", [self.last_block_hash]
            )

            if block_header["result"] is None:
                raise SubstrateRequestException(
                    f'Block not found for "{self.last_block_hash}"'
                )

            parent_block_hash: str = block_header["result"]["parentHash"]

            # The genesis block has an all-zero parent; fall back to the block itself.
            if (
                parent_block_hash
                == "0x0000000000000000000000000000000000000000000000000000000000000000"
            ):
                runtime_block_hash = self.last_block_hash
            else:
                runtime_block_hash = parent_block_hash

            runtime_info = await self.get_block_runtime_version(
                block_hash=runtime_block_hash
            )

            if runtime_info is None:
                raise SubstrateRequestException(
                    f"No runtime information for block '{block_hash}'"
                )
            # Spec version unchanged and metadata loaded: reuse the current state.
            if (
                runtime_info.get("specVersion") == self.runtime_version
                and self.metadata is not None
            ):
                return Runtime(
                    self.chain,
                    self.runtime_config,
                    self.metadata,
                    self.type_registry,
                )

            self.runtime_version = runtime_info.get("specVersion")
            self.transaction_version = runtime_info.get("transactionVersion")

            if not self.metadata:
                if self.runtime_version in self.__metadata_cache:
                    # Get metadata from the in-memory cache keyed by spec version.
                    metadata = self.metadata = self.__metadata_cache[
                        self.runtime_version
                    ]
                else:
                    # Fetch and decode metadata from the node for the runtime block.
                    metadata = self.metadata = await self.get_block_metadata(
                        block_hash=runtime_block_hash, decode=True
                    )

                    # Update metadata cache
                    self.__metadata_cache[self.runtime_version] = self.metadata
            else:
                # NOTE(review): `metadata` is assigned but never read afterwards.
                metadata = self.metadata
            # Update type registry
            self.reload_type_registry(use_remote_preset=False, auto_discover=True)

            if self.implements_scaleinfo:
                # MetadataV14+ embeds a PortableRegistry; feed it to the type registry.
                self.runtime_config.add_portable_registry(self.metadata)

            # Set active runtime version
            self.runtime_config.set_active_spec_version_id(self.runtime_version)

            # Check and apply runtime constants
            ss58_prefix_constant = await self.get_constant(
                "System", "SS58Prefix", block_hash=block_hash
            )

            if ss58_prefix_constant:
                self.ss58_format = ss58_prefix_constant

            # Set runtime compatibility flags: probe for the WeightV2 type and fall
            # back to WeightV1 when the runtime does not define it.
            try:
                _ = self.runtime_config.create_scale_object(
                    "sp_weights::weight_v2::Weight"
                )
                self.config["is_weight_v2"] = True
                self.runtime_config.update_type_registry_types(
                    {"Weight": "sp_weights::weight_v2::Weight"}
                )
            except NotImplementedError:
                self.config["is_weight_v2"] = False
                self.runtime_config.update_type_registry_types({"Weight": "WeightV1"})
            return Runtime(
                self.chain,
                self.runtime_config,
                self.metadata,
                self.type_registry,
            )

        if block_id and block_hash:
            raise ValueError("Cannot provide block_hash and block_id at the same time")

        # Serve from the runtime cache when possible; otherwise build and cache.
        if (
            not (runtime := self.runtime_cache.retrieve(block_id, block_hash))
            or runtime.metadata is None
        ):
            runtime = await get_runtime(block_hash, block_id)
            self.runtime_cache.add_item(block_id, block_hash, runtime)
        return runtime
1031
-
1032
- def reload_type_registry(
1033
- self, use_remote_preset: bool = True, auto_discover: bool = True
1034
- ):
1035
- """
1036
- Reload type registry and preset used to instantiate the SubtrateInterface object. Useful to periodically apply
1037
- changes in type definitions when a runtime upgrade occurred
1038
-
1039
- Parameters
1040
- ----------
1041
- use_remote_preset: When True preset is downloaded from Github master, otherwise use files from local installed scalecodec package
1042
- auto_discover
1043
-
1044
- Returns
1045
- -------
1046
-
1047
- """
1048
- self.runtime_config.clear_type_registry()
1049
-
1050
- self.runtime_config.implements_scale_info = self.implements_scaleinfo
1051
-
1052
- # Load metadata types in runtime configuration
1053
- self.runtime_config.update_type_registry(load_type_registry_preset(name="core"))
1054
- self.apply_type_registry_presets(
1055
- use_remote_preset=use_remote_preset, auto_discover=auto_discover
1056
- )
1057
-
1058
- def apply_type_registry_presets(
1059
- self, use_remote_preset: bool = True, auto_discover: bool = True
1060
- ):
1061
- if self.type_registry_preset is not None:
1062
- # Load type registry according to preset
1063
- type_registry_preset_dict = load_type_registry_preset(
1064
- name=self.type_registry_preset, use_remote_preset=use_remote_preset
1065
- )
1066
-
1067
- if not type_registry_preset_dict:
1068
- raise ValueError(
1069
- f"Type registry preset '{self.type_registry_preset}' not found"
1070
- )
1071
-
1072
- elif auto_discover:
1073
- # Try to auto discover type registry preset by chain name
1074
- type_registry_name = self.chain.lower().replace(" ", "-")
1075
- try:
1076
- type_registry_preset_dict = load_type_registry_preset(
1077
- type_registry_name
1078
- )
1079
- # self.debug_message(f"Auto set type_registry_preset to {type_registry_name} ...")
1080
- self.type_registry_preset = type_registry_name
1081
- except ValueError:
1082
- type_registry_preset_dict = None
1083
-
1084
- else:
1085
- type_registry_preset_dict = None
1086
-
1087
- if type_registry_preset_dict:
1088
- # Load type registries in runtime configuration
1089
- if self.implements_scaleinfo is False:
1090
- # Only runtime with no embedded types in metadata need the default set of explicit defined types
1091
- self.runtime_config.update_type_registry(
1092
- load_type_registry_preset(
1093
- "legacy", use_remote_preset=use_remote_preset
1094
- )
1095
- )
1096
-
1097
- if self.type_registry_preset != "legacy":
1098
- self.runtime_config.update_type_registry(type_registry_preset_dict)
1099
-
1100
- if self.type_registry:
1101
- # Load type registries in runtime configuration
1102
- self.runtime_config.update_type_registry(self.type_registry)
1103
-
1104
- @property
1105
- def implements_scaleinfo(self) -> Optional[bool]:
1106
- """
1107
- Returns True if current runtime implementation a `PortableRegistry` (`MetadataV14` and higher)
1108
-
1109
- Returns
1110
- -------
1111
- bool
1112
- """
1113
- if self.metadata:
1114
- return self.metadata.portable_registry is not None
1115
- else:
1116
- return None
1117
-
1118
- async def create_storage_key(
1119
- self,
1120
- pallet: str,
1121
- storage_function: str,
1122
- params: Optional[list] = None,
1123
- block_hash: str = None,
1124
- ) -> StorageKey:
1125
- """
1126
- Create a `StorageKey` instance providing storage function details. See `subscribe_storage()`.
1127
-
1128
- Parameters
1129
- ----------
1130
- pallet: name of pallet
1131
- storage_function: name of storage function
1132
- params: Optional list of parameters in case of a Mapped storage function
1133
-
1134
- Returns
1135
- -------
1136
- StorageKey
1137
- """
1138
- await self.init_runtime(block_hash=block_hash)
1139
-
1140
- return StorageKey.create_from_storage_function(
1141
- pallet,
1142
- storage_function,
1143
- params,
1144
- runtime_config=self.runtime_config,
1145
- metadata=self.metadata,
1146
- )
1147
-
1148
    async def _get_block_handler(
        self,
        block_hash: str,
        ignore_decoding_errors: bool = False,
        include_author: bool = False,
        header_only: bool = False,
        finalized_only: bool = False,
        subscription_handler: Optional[Callable] = None,
    ):
        """
        Fetch a block (or just its header) for ``block_hash`` and decode its
        extrinsics and digest logs; optionally resolve the block author and/or
        drive a chain-heads subscription.

        :param block_hash: hash of the block to decode
        :param ignore_decoding_errors: when True, items that fail to decode are
            replaced with None instead of raising
        :param include_author: look the block author up in the session validator
            set and attach it as ``block_data["author"]``
        :param header_only: fetch only the header (``chain_getHeader``) instead
            of the full block
        :param finalized_only: with a subscription handler, subscribe to
            finalized heads instead of new heads
        :param subscription_handler: optional callable; when given, each decoded
            head is passed to it until it returns a non-None result, which ends
            the subscription

        :return: the decoded block dict, the subscription handler's final
            result, or None when the block is not found
        """
        try:
            await self.init_runtime(block_hash=block_hash)
        except BlockNotFound:
            return None

        async def decode_block(block_data, block_data_hash=None):
            # Decodes extrinsics and digest logs in place; returns block_data.
            if block_data:
                if block_data_hash:
                    block_data["header"]["hash"] = block_data_hash

                if type(block_data["header"]["number"]) is str:
                    # Convert block number from hex (backwards compatibility)
                    block_data["header"]["number"] = int(
                        block_data["header"]["number"], 16
                    )

                extrinsic_cls = self.runtime_config.get_decoder_class("Extrinsic")

                if "extrinsics" in block_data:
                    for idx, extrinsic_data in enumerate(block_data["extrinsics"]):
                        extrinsic_decoder = extrinsic_cls(
                            data=ScaleBytes(extrinsic_data),
                            metadata=self.metadata,
                            runtime_config=self.runtime_config,
                        )
                        try:
                            extrinsic_decoder.decode(check_remaining=True)
                            block_data["extrinsics"][idx] = extrinsic_decoder

                        except Exception as e:
                            if not ignore_decoding_errors:
                                raise
                            # Decoding failed but errors are ignored: blank the slot.
                            block_data["extrinsics"][idx] = None

                for idx, log_data in enumerate(block_data["header"]["digest"]["logs"]):
                    if type(log_data) is str:
                        # Convert digest log from hex (backwards compatibility)
                        try:
                            log_digest_cls = self.runtime_config.get_decoder_class(
                                "sp_runtime::generic::digest::DigestItem"
                            )

                            if log_digest_cls is None:
                                raise NotImplementedError(
                                    "No decoding class found for 'DigestItem'"
                                )

                            log_digest = log_digest_cls(data=ScaleBytes(log_data))
                            log_digest.decode(
                                check_remaining=self.config.get("strict_scale_decode")
                            )

                            block_data["header"]["digest"]["logs"][idx] = log_digest

                            if include_author and "PreRuntime" in log_digest.value:
                                if self.implements_scaleinfo:
                                    # V14+ metadata: engine id is raw bytes in the digest.
                                    engine = bytes(log_digest[1][0])
                                    # Retrieve validator set
                                    parent_hash = block_data["header"]["parentHash"]
                                    validator_set = await self.query(
                                        "Session", "Validators", block_hash=parent_hash
                                    )

                                    if engine == b"BABE":
                                        babe_predigest = (
                                            self.runtime_config.create_scale_object(
                                                type_string="RawBabePreDigest",
                                                data=ScaleBytes(
                                                    bytes(log_digest[1][1])
                                                ),
                                            )
                                        )

                                        babe_predigest.decode(
                                            check_remaining=self.config.get(
                                                "strict_scale_decode"
                                            )
                                        )

                                        rank_validator = babe_predigest[1].value[
                                            "authority_index"
                                        ]

                                        block_author = validator_set[rank_validator]
                                        block_data["author"] = block_author.value

                                    elif engine == b"aura":
                                        aura_predigest = (
                                            self.runtime_config.create_scale_object(
                                                type_string="RawAuraPreDigest",
                                                data=ScaleBytes(
                                                    bytes(log_digest[1][1])
                                                ),
                                            )
                                        )

                                        aura_predigest.decode(check_remaining=True)

                                        # Aura authors round-robin by slot number.
                                        rank_validator = aura_predigest.value[
                                            "slot_number"
                                        ] % len(validator_set)

                                        block_author = validator_set[rank_validator]
                                        block_data["author"] = block_author.value
                                    else:
                                        raise NotImplementedError(
                                            f"Cannot extract author for engine {log_digest.value['PreRuntime'][0]}"
                                        )
                                else:
                                    # Legacy (pre-scaleinfo) digest layout.
                                    if (
                                        log_digest.value["PreRuntime"]["engine"]
                                        == "BABE"
                                    ):
                                        validator_set = await self.query(
                                            "Session",
                                            "Validators",
                                            block_hash=block_hash,
                                        )
                                        rank_validator = log_digest.value["PreRuntime"][
                                            "data"
                                        ]["authority_index"]

                                        block_author = validator_set.elements[
                                            rank_validator
                                        ]
                                        block_data["author"] = block_author.value
                                    else:
                                        raise NotImplementedError(
                                            f"Cannot extract author for engine {log_digest.value['PreRuntime']['engine']}"
                                        )

                        except Exception:
                            if not ignore_decoding_errors:
                                raise
                            block_data["header"]["digest"]["logs"][idx] = None

            return block_data

        if callable(subscription_handler):
            rpc_method_prefix = "Finalized" if finalized_only else "New"

            async def result_handler(message, update_nr, subscription_id):
                # Decode each incoming head and forward it to the user handler.
                new_block = await decode_block({"header": message["params"]["result"]})

                subscription_result = subscription_handler(
                    new_block, update_nr, subscription_id
                )

                if subscription_result is not None:
                    # Handler returned end result: unsubscribe from further updates
                    self._forgettable_task = asyncio.create_task(
                        self.rpc_request(
                            f"chain_unsubscribe{rpc_method_prefix}Heads",
                            [subscription_id],
                        )
                    )

                return subscription_result

            result = await self._make_rpc_request(
                [
                    self.make_payload(
                        "_get_block_handler",
                        f"chain_subscribe{rpc_method_prefix}Heads",
                        [],
                    )
                ],
                result_handler=result_handler,
            )

            return result

        else:
            if header_only:
                response = await self.rpc_request("chain_getHeader", [block_hash])
                return await decode_block(
                    {"header": response["result"]}, block_data_hash=block_hash
                )

            else:
                response = await self.rpc_request("chain_getBlock", [block_hash])
                return await decode_block(
                    response["result"]["block"], block_data_hash=block_hash
                )
1341
-
1342
- async def get_block(
1343
- self,
1344
- block_hash: Optional[str] = None,
1345
- block_number: Optional[int] = None,
1346
- ignore_decoding_errors: bool = False,
1347
- include_author: bool = False,
1348
- finalized_only: bool = False,
1349
- ) -> Optional[dict]:
1350
- """
1351
- Retrieves a block and decodes its containing extrinsics and log digest items. If `block_hash` and `block_number`
1352
- is omitted the chain tip will be retrieve, or the finalized head if `finalized_only` is set to true.
1353
-
1354
- Either `block_hash` or `block_number` should be set, or both omitted.
1355
-
1356
- Parameters
1357
- ----------
1358
- block_hash: the hash of the block to be retrieved
1359
- block_number: the block number to retrieved
1360
- ignore_decoding_errors: When set this will catch all decoding errors, set the item to None and continue decoding
1361
- include_author: This will retrieve the block author from the validator set and add to the result
1362
- finalized_only: when no `block_hash` or `block_number` is set, this will retrieve the finalized head
1363
-
1364
- Returns
1365
- -------
1366
- A dict containing the extrinsic and digest logs data
1367
- """
1368
- if block_hash and block_number:
1369
- raise ValueError("Either block_hash or block_number should be be set")
1370
-
1371
- if block_number is not None:
1372
- block_hash = await self.get_block_hash(block_number)
1373
-
1374
- if block_hash is None:
1375
- return
1376
-
1377
- if block_hash and finalized_only:
1378
- raise ValueError(
1379
- "finalized_only cannot be True when block_hash is provided"
1380
- )
1381
-
1382
- if block_hash is None:
1383
- # Retrieve block hash
1384
- if finalized_only:
1385
- block_hash = await self.get_chain_finalised_head()
1386
- else:
1387
- block_hash = await self.get_chain_head()
1388
-
1389
- return await self._get_block_handler(
1390
- block_hash=block_hash,
1391
- ignore_decoding_errors=ignore_decoding_errors,
1392
- header_only=False,
1393
- include_author=include_author,
1394
- )
1395
-
1396
    async def get_events(self, block_hash: Optional[str] = None) -> list:
        """
        Convenience method to get events for a certain block (storage call for
        module 'System' and function 'Events').

        :param block_hash: optional hash of the block to fetch events for; the
            chain head is used when omitted

        :return: list of event dicts with keys ``phase``, ``extrinsic_idx``,
            ``event`` and ``topics``
        """

        def convert_event_data(data):
            # Normalizes one raw decoded event record into a flat dict.
            # Extract phase information
            phase_key, phase_value = next(iter(data["phase"].items()))
            try:
                extrinsic_idx = phase_value[0]
            except IndexError:
                # Phase carries no extrinsic index (e.g. not ApplyExtrinsic).
                extrinsic_idx = None

            # Extract event details
            module_id, event_data = next(iter(data["event"].items()))
            event_id, attributes_data = next(iter(event_data[0].items()))

            # Convert class and pays_fee dictionaries to their string equivalents
            # if they exist. NOTE: mutates the attribute dict in place (safe — no
            # keys are added or removed during iteration).
            attributes = attributes_data
            if isinstance(attributes, dict):
                for key, value in attributes.items():
                    if isinstance(value, dict):
                        # Convert nested single-key dictionaries to their keys as strings
                        sub_key = next(iter(value.keys()))
                        if value[sub_key] == ():
                            attributes[key] = sub_key

            # Create the converted dictionary
            converted = {
                "phase": phase_key,
                "extrinsic_idx": extrinsic_idx,
                "event": {
                    "module_id": module_id,
                    "event_id": event_id,
                    "attributes": attributes,
                },
                "topics": list(data["topics"]),  # Convert topics tuple to a list
            }

            return converted

        events = []

        if not block_hash:
            block_hash = await self.get_chain_head()

        storage_obj = await self.query(
            module="System", storage_function="Events", block_hash=block_hash
        )
        if storage_obj:
            for item in list(storage_obj):
                events.append(convert_event_data(item))
        return events
1459
-
1460
- async def get_block_runtime_version(self, block_hash: str) -> dict:
1461
- """
1462
- Retrieve the runtime version id of given block_hash
1463
- """
1464
- response = await self.rpc_request("state_getRuntimeVersion", [block_hash])
1465
- return response.get("result")
1466
-
1467
- async def get_block_metadata(
1468
- self, block_hash: Optional[str] = None, decode: bool = True
1469
- ) -> Union[dict, ScaleType]:
1470
- """
1471
- A pass-though to existing JSONRPC method `state_getMetadata`.
1472
-
1473
- Parameters
1474
- ----------
1475
- block_hash
1476
- decode: True for decoded version
1477
-
1478
- Returns
1479
- -------
1480
-
1481
- """
1482
- params = None
1483
- if decode and not self.runtime_config:
1484
- raise ValueError(
1485
- "Cannot decode runtime configuration without a supplied runtime_config"
1486
- )
1487
-
1488
- if block_hash:
1489
- params = [block_hash]
1490
- response = await self.rpc_request("state_getMetadata", params)
1491
-
1492
- if "error" in response:
1493
- raise SubstrateRequestException(response["error"]["message"])
1494
-
1495
- if response.get("result") and decode:
1496
- metadata_decoder = self.runtime_config.create_scale_object(
1497
- "MetadataVersioned", data=ScaleBytes(response.get("result"))
1498
- )
1499
- metadata_decoder.decode()
1500
-
1501
- return metadata_decoder
1502
-
1503
- return response
1504
-
1505
- async def _preprocess(
1506
- self,
1507
- query_for: Optional[list],
1508
- block_hash: Optional[str],
1509
- storage_function: str,
1510
- module: str,
1511
- ) -> Preprocessed:
1512
- """
1513
- Creates a Preprocessed data object for passing to `_make_rpc_request`
1514
- """
1515
- params = query_for if query_for else []
1516
- # Search storage call in metadata
1517
- metadata_pallet = self.metadata.get_metadata_pallet(module)
1518
-
1519
- if not metadata_pallet:
1520
- raise SubstrateRequestException(f'Pallet "{module}" not found')
1521
-
1522
- storage_item = metadata_pallet.get_storage_function(storage_function)
1523
-
1524
- if not metadata_pallet or not storage_item:
1525
- raise SubstrateRequestException(
1526
- f'Storage function "{module}.{storage_function}" not found'
1527
- )
1528
-
1529
- # SCALE type string of value
1530
- param_types = storage_item.get_params_type_string()
1531
- value_scale_type = storage_item.get_value_type_string()
1532
-
1533
- if len(params) != len(param_types):
1534
- raise ValueError(
1535
- f"Storage function requires {len(param_types)} parameters, {len(params)} given"
1536
- )
1537
-
1538
- storage_key = StorageKey.create_from_storage_function(
1539
- module,
1540
- storage_item.value["name"],
1541
- params,
1542
- runtime_config=self.runtime_config,
1543
- metadata=self.metadata,
1544
- )
1545
- method = "state_getStorageAt"
1546
- return Preprocessed(
1547
- str(query_for),
1548
- method,
1549
- [storage_key.to_hex(), block_hash],
1550
- value_scale_type,
1551
- storage_item,
1552
- )
1553
-
1554
    async def _process_response(
        self,
        response: dict,
        subscription_id: Union[int, str],
        value_scale_type: Optional[str] = None,
        storage_item: Optional[ScaleType] = None,
        runtime: Optional[Runtime] = None,
        result_handler: Optional[ResultHandler] = None,
    ) -> tuple[Union[ScaleType, dict], bool]:
        """
        Processes the RPC call response by decoding it, returning it as is, or setting a handler for subscriptions,
        depending on the specific call.

        :param response: the RPC call response
        :param subscription_id: the subscription id for subscriptions, used only for subscriptions with a result handler
        :param value_scale_type: Scale Type string used for decoding ScaleBytes results
        :param storage_item: The ScaleType object used for decoding ScaleBytes results
        :param runtime: the runtime object, used for decoding ScaleBytes results
        :param result_handler: the result handler coroutine used for handling longer-running subscriptions

        :return: (decoded response, completion)
        """
        result: Union[dict, ScaleType] = response
        if value_scale_type and isinstance(storage_item, ScaleType):
            if not runtime:
                # Build a runtime snapshot under the lock so the interface state
                # is read consistently.
                async with self._lock:
                    runtime = Runtime(
                        self.chain,
                        self.runtime_config,
                        self.metadata,
                        self.type_registry,
                    )
            if response.get("result") is not None:
                query_value = response.get("result")
            elif storage_item.value["modifier"] == "Default":
                # Fallback to default value of storage function if no result
                query_value = storage_item.value_object["default"].value_object
            else:
                # No result is interpreted as an Option<...> result
                value_scale_type = f"Option<{value_scale_type}>"
                query_value = storage_item.value_object["default"].value_object
            # Normalize the raw payload to bytes before SCALE-decoding:
            # hex strings are "0x"-prefixed, bytearrays are converted directly.
            if isinstance(query_value, str):
                q = bytes.fromhex(query_value[2:])
            elif isinstance(query_value, bytearray):
                q = bytes(query_value)
            else:
                q = query_value
            obj = await self.decode_scale(value_scale_type, q, True)
            result = obj
        if asyncio.iscoroutinefunction(result_handler):
            # For multipart responses as a result of subscriptions.
            message, bool_result = await result_handler(response, subscription_id)
            return message, bool_result
        return result, True
1608
-
1609
    async def _make_rpc_request(
        self,
        payloads: list[dict],
        value_scale_type: Optional[str] = None,
        storage_item: Optional[ScaleType] = None,
        runtime: Optional[Runtime] = None,
        result_handler: Optional[ResultHandler] = None,
    ) -> RequestManager.RequestResults:
        """
        Send one or more payloads over the shared websocket and collect their
        (optionally decoded) responses.

        :param payloads: list of payload dicts as produced by ``make_payload``
        :param value_scale_type: SCALE type string used for decoding results
        :param storage_item: storage ScaleType used for decoding results
        :param runtime: runtime snapshot used for decoding results
        :param result_handler: coroutine handling multipart subscription results

        :return: the RequestManager's collected results, keyed by payload id
        """
        request_manager = RequestManager(payloads)

        subscription_added = False

        async with self.ws as ws:
            for item in payloads:
                item_id = await ws.send(item["payload"])
                request_manager.add_request(item_id, item["id"])

            while True:
                for item_id in request_manager.response_map.keys():
                    # Subscriptions keep polling even after a first response.
                    if (
                        item_id not in request_manager.responses
                        or asyncio.iscoroutinefunction(result_handler)
                    ):
                        if response := await ws.retrieve(item_id):
                            if (
                                asyncio.iscoroutinefunction(result_handler)
                                and not subscription_added
                            ):
                                # handles subscriptions, overwrites the previous mapping of {item_id : payload_id}
                                # with {subscription_id : payload_id}
                                try:
                                    item_id = request_manager.overwrite_request(
                                        item_id, response["result"]
                                    )
                                except KeyError:
                                    raise SubstrateRequestException(str(response))
                            decoded_response, complete = await self._process_response(
                                response,
                                item_id,
                                value_scale_type,
                                storage_item,
                                runtime,
                                result_handler,
                            )
                            request_manager.add_response(
                                item_id, decoded_response, complete
                            )
                        # Restart the scan once the subscription id has replaced
                        # the original request id, so iteration sees the new key.
                        if (
                            asyncio.iscoroutinefunction(result_handler)
                            and not subscription_added
                        ):
                            subscription_added = True
                            break

                if request_manager.is_complete:
                    break

        return request_manager.get_results()
1667
-
1668
- @staticmethod
1669
- def make_payload(id_: str, method: str, params: list) -> dict:
1670
- """
1671
- Creates a payload for making an rpc_request with _make_rpc_request
1672
-
1673
- :param id_: a unique name you would like to give to this request
1674
- :param method: the method in the RPC request
1675
- :param params: the params in the RPC request
1676
-
1677
- :return: the payload dict
1678
- """
1679
- return {
1680
- "id": id_,
1681
- "payload": {"jsonrpc": "2.0", "method": method, "params": params},
1682
- }
1683
-
1684
- async def rpc_request(
1685
- self,
1686
- method: str,
1687
- params: Optional[list],
1688
- block_hash: Optional[str] = None,
1689
- reuse_block_hash: bool = False,
1690
- ) -> Any:
1691
- """
1692
- Makes an RPC request to the subtensor. Use this only if ``self.query`` and ``self.query_multiple`` and
1693
- ``self.query_map`` do not meet your needs.
1694
-
1695
- :param method: str the method in the RPC request
1696
- :param params: list of the params in the RPC request
1697
- :param block_hash: optional str, the hash of the block — only supply this if not supplying the block
1698
- hash in the params, and not reusing the block hash
1699
- :param reuse_block_hash: optional bool, whether to reuse the block hash in the params — only mark as True
1700
- if not supplying the block hash in the params, or via the `block_hash` parameter
1701
-
1702
- :return: the response from the RPC request
1703
- """
1704
- block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash)
1705
- params = params or []
1706
- payload_id = f"{method}{random.randint(0, 7000)}"
1707
- payloads = [
1708
- self.make_payload(
1709
- payload_id,
1710
- method,
1711
- params + [block_hash] if block_hash else params,
1712
- )
1713
- ]
1714
- runtime = Runtime(
1715
- self.chain,
1716
- self.runtime_config,
1717
- self.metadata,
1718
- self.type_registry,
1719
- )
1720
- result = await self._make_rpc_request(payloads, runtime=runtime)
1721
- if "error" in result[payload_id][0]:
1722
- raise SubstrateRequestException(result[payload_id][0]["error"]["message"])
1723
- if "result" in result[payload_id][0]:
1724
- return result[payload_id][0]
1725
- else:
1726
- raise SubstrateRequestException(result[payload_id][0])
1727
-
1728
- async def get_block_hash(self, block_id: int) -> str:
1729
- return (await self.rpc_request("chain_getBlockHash", [block_id]))["result"]
1730
-
1731
- async def get_chain_head(self) -> str:
1732
- result = await self._make_rpc_request(
1733
- [
1734
- self.make_payload(
1735
- "rpc_request",
1736
- "chain_getHead",
1737
- [],
1738
- )
1739
- ],
1740
- runtime=Runtime(
1741
- self.chain,
1742
- self.runtime_config,
1743
- self.metadata,
1744
- self.type_registry,
1745
- ),
1746
- )
1747
- self.last_block_hash = result["rpc_request"][0]["result"]
1748
- return result["rpc_request"][0]["result"]
1749
-
1750
- async def compose_call(
1751
- self,
1752
- call_module: str,
1753
- call_function: str,
1754
- call_params: Optional[dict] = None,
1755
- block_hash: Optional[str] = None,
1756
- ) -> GenericCall:
1757
- """
1758
- Composes a call payload which can be used in an extrinsic.
1759
-
1760
- :param call_module: Name of the runtime module e.g. Balances
1761
- :param call_function: Name of the call function e.g. transfer
1762
- :param call_params: This is a dict containing the params of the call. e.g.
1763
- `{'dest': 'EaG2CRhJWPb7qmdcJvy3LiWdh26Jreu9Dx6R1rXxPmYXoDk', 'value': 1000000000000}`
1764
- :param block_hash: Use metadata at given block_hash to compose call
1765
-
1766
- :return: A composed call
1767
- """
1768
- if call_params is None:
1769
- call_params = {}
1770
-
1771
- await self.init_runtime(block_hash=block_hash)
1772
- call = self.runtime_config.create_scale_object(
1773
- type_string="Call", metadata=self.metadata
1774
- )
1775
-
1776
- call.encode(
1777
- {
1778
- "call_module": call_module,
1779
- "call_function": call_function,
1780
- "call_args": call_params,
1781
- }
1782
- )
1783
-
1784
- return call
1785
-
1786
- async def query_multiple(
1787
- self,
1788
- params: list,
1789
- storage_function: str,
1790
- module: str,
1791
- block_hash: Optional[str] = None,
1792
- reuse_block_hash: bool = False,
1793
- ) -> dict[str, ScaleType]:
1794
- """
1795
- Queries the subtensor. Only use this when making multiple queries, else use ``self.query``
1796
- """
1797
- # By allowing for specifying the block hash, users, if they have multiple query types they want
1798
- # to do, can simply query the block hash first, and then pass multiple query_subtensor calls
1799
- # into an asyncio.gather, with the specified block hash
1800
- block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash)
1801
- if block_hash:
1802
- self.last_block_hash = block_hash
1803
- runtime = await self.init_runtime(block_hash=block_hash)
1804
- preprocessed: tuple[Preprocessed] = await asyncio.gather(
1805
- *[
1806
- self._preprocess([x], block_hash, storage_function, module)
1807
- for x in params
1808
- ]
1809
- )
1810
- all_info = [
1811
- self.make_payload(item.queryable, item.method, item.params)
1812
- for item in preprocessed
1813
- ]
1814
- # These will always be the same throughout the preprocessed list, so we just grab the first one
1815
- value_scale_type = preprocessed[0].value_scale_type
1816
- storage_item = preprocessed[0].storage_item
1817
-
1818
- responses = await self._make_rpc_request(
1819
- all_info, value_scale_type, storage_item, runtime
1820
- )
1821
- return {
1822
- param: responses[p.queryable][0] for (param, p) in zip(params, preprocessed)
1823
- }
1824
-
1825
    async def query_multi(
        self, storage_keys: list[StorageKey], block_hash: Optional[str] = None
    ) -> list:
        """
        Query multiple storage keys in one request.

        Example:

        ```
        storage_keys = [
            substrate.create_storage_key(
                "System", "Account", ["F4xQKRUagnSGjFqafyhajLs94e7Vvzvr8ebwYJceKpr8R7T"]
            ),
            substrate.create_storage_key(
                "System", "Account", ["GSEX8kR4Kz5UZGhvRUCJG93D5hhTAoVZ5tAe6Zne7V42DSi"]
            )
        ]

        result = substrate.query_multi(storage_keys)
        ```

        :param storage_keys: list of StorageKey objects
        :param block_hash: optional block_hash of the state snapshot to query

        :return: list of `(storage_key, scale_obj)` tuples
        """

        await self.init_runtime(block_hash=block_hash)

        # Retrieve the corresponding values for all keys in a single RPC call.
        response = await self.rpc_request(
            "state_queryStorageAt", [[s.to_hex() for s in storage_keys], block_hash]
        )

        if "error" in response:
            raise SubstrateRequestException(response["error"]["message"])

        result = []

        # Map hex representation back to the originating StorageKey objects.
        storage_key_map = {s.to_hex(): s for s in storage_keys}

        for result_group in response["result"]:
            for change_storage_key, change_data in result_group["changes"]:
                # Decode result for specified storage_key
                storage_key = storage_key_map[change_storage_key]
                if change_data is None:
                    # b"\x00" is the sentinel decode_scale treats as "no value".
                    change_data = b"\x00"
                else:
                    change_data = bytes.fromhex(change_data[2:])
                result.append(
                    (
                        storage_key,
                        await self.decode_scale(
                            storage_key.value_scale_type, change_data
                        ),
                    )
                )

        return result
1888
-
1889
- async def create_scale_object(
1890
- self,
1891
- type_string: str,
1892
- data: Optional[ScaleBytes] = None,
1893
- block_hash: Optional[str] = None,
1894
- **kwargs,
1895
- ) -> "ScaleType":
1896
- """
1897
- Convenience method to create a SCALE object of type `type_string`, this will initialize the runtime
1898
- automatically at moment of `block_hash`, or chain tip if omitted.
1899
-
1900
- :param type_string: str Name of SCALE type to create
1901
- :param data: ScaleBytes Optional ScaleBytes to decode
1902
- :param block_hash: Optional block hash for moment of decoding, when omitted the chain tip will be used
1903
- :param kwargs: keyword args for the Scale Type constructor
1904
-
1905
- :return: The created Scale Type object
1906
- """
1907
- runtime = await self.init_runtime(block_hash=block_hash)
1908
- if "metadata" not in kwargs:
1909
- kwargs["metadata"] = runtime.metadata
1910
-
1911
- return runtime.runtime_config.create_scale_object(
1912
- type_string, data=data, **kwargs
1913
- )
1914
-
1915
- async def generate_signature_payload(
1916
- self,
1917
- call: GenericCall,
1918
- era=None,
1919
- nonce: int = 0,
1920
- tip: int = 0,
1921
- tip_asset_id: Optional[int] = None,
1922
- include_call_length: bool = False,
1923
- ) -> ScaleBytes:
1924
- # Retrieve genesis hash
1925
- genesis_hash = await self.get_block_hash(0)
1926
-
1927
- if not era:
1928
- era = "00"
1929
-
1930
- if era == "00":
1931
- # Immortal extrinsic
1932
- block_hash = genesis_hash
1933
- else:
1934
- # Determine mortality of extrinsic
1935
- era_obj = self.runtime_config.create_scale_object("Era")
1936
-
1937
- if isinstance(era, dict) and "current" not in era and "phase" not in era:
1938
- raise ValueError(
1939
- 'The era dict must contain either "current" or "phase" element to encode a valid era'
1940
- )
1941
-
1942
- era_obj.encode(era)
1943
- block_hash = await self.get_block_hash(
1944
- block_id=era_obj.birth(era.get("current"))
1945
- )
1946
-
1947
- # Create signature payload
1948
- signature_payload = self.runtime_config.create_scale_object(
1949
- "ExtrinsicPayloadValue"
1950
- )
1951
-
1952
- # Process signed extensions in metadata
1953
- if "signed_extensions" in self.metadata[1][1]["extrinsic"]:
1954
- # Base signature payload
1955
- signature_payload.type_mapping = [["call", "CallBytes"]]
1956
-
1957
- # Add signed extensions to payload
1958
- signed_extensions = self.metadata.get_signed_extensions()
1959
-
1960
- if "CheckMortality" in signed_extensions:
1961
- signature_payload.type_mapping.append(
1962
- ["era", signed_extensions["CheckMortality"]["extrinsic"]]
1963
- )
1964
-
1965
- if "CheckEra" in signed_extensions:
1966
- signature_payload.type_mapping.append(
1967
- ["era", signed_extensions["CheckEra"]["extrinsic"]]
1968
- )
1969
-
1970
- if "CheckNonce" in signed_extensions:
1971
- signature_payload.type_mapping.append(
1972
- ["nonce", signed_extensions["CheckNonce"]["extrinsic"]]
1973
- )
1974
-
1975
- if "ChargeTransactionPayment" in signed_extensions:
1976
- signature_payload.type_mapping.append(
1977
- ["tip", signed_extensions["ChargeTransactionPayment"]["extrinsic"]]
1978
- )
1979
-
1980
- if "ChargeAssetTxPayment" in signed_extensions:
1981
- signature_payload.type_mapping.append(
1982
- ["asset_id", signed_extensions["ChargeAssetTxPayment"]["extrinsic"]]
1983
- )
1984
-
1985
- if "CheckMetadataHash" in signed_extensions:
1986
- signature_payload.type_mapping.append(
1987
- ["mode", signed_extensions["CheckMetadataHash"]["extrinsic"]]
1988
- )
1989
-
1990
- if "CheckSpecVersion" in signed_extensions:
1991
- signature_payload.type_mapping.append(
1992
- [
1993
- "spec_version",
1994
- signed_extensions["CheckSpecVersion"]["additional_signed"],
1995
- ]
1996
- )
1997
-
1998
- if "CheckTxVersion" in signed_extensions:
1999
- signature_payload.type_mapping.append(
2000
- [
2001
- "transaction_version",
2002
- signed_extensions["CheckTxVersion"]["additional_signed"],
2003
- ]
2004
- )
2005
-
2006
- if "CheckGenesis" in signed_extensions:
2007
- signature_payload.type_mapping.append(
2008
- [
2009
- "genesis_hash",
2010
- signed_extensions["CheckGenesis"]["additional_signed"],
2011
- ]
2012
- )
2013
-
2014
- if "CheckMortality" in signed_extensions:
2015
- signature_payload.type_mapping.append(
2016
- [
2017
- "block_hash",
2018
- signed_extensions["CheckMortality"]["additional_signed"],
2019
- ]
2020
- )
2021
-
2022
- if "CheckEra" in signed_extensions:
2023
- signature_payload.type_mapping.append(
2024
- ["block_hash", signed_extensions["CheckEra"]["additional_signed"]]
2025
- )
2026
-
2027
- if "CheckMetadataHash" in signed_extensions:
2028
- signature_payload.type_mapping.append(
2029
- [
2030
- "metadata_hash",
2031
- signed_extensions["CheckMetadataHash"]["additional_signed"],
2032
- ]
2033
- )
2034
-
2035
- if include_call_length:
2036
- length_obj = self.runtime_config.create_scale_object("Bytes")
2037
- call_data = str(length_obj.encode(str(call.data)))
2038
-
2039
- else:
2040
- call_data = str(call.data)
2041
-
2042
- payload_dict = {
2043
- "call": call_data,
2044
- "era": era,
2045
- "nonce": nonce,
2046
- "tip": tip,
2047
- "spec_version": self.runtime_version,
2048
- "genesis_hash": genesis_hash,
2049
- "block_hash": block_hash,
2050
- "transaction_version": self.transaction_version,
2051
- "asset_id": {"tip": tip, "asset_id": tip_asset_id},
2052
- "metadata_hash": None,
2053
- "mode": "Disabled",
2054
- }
2055
-
2056
- signature_payload.encode(payload_dict)
2057
-
2058
- if signature_payload.data.length > 256:
2059
- return ScaleBytes(
2060
- data=blake2b(signature_payload.data.data, digest_size=32).digest()
2061
- )
2062
-
2063
- return signature_payload.data
2064
-
2065
- async def create_signed_extrinsic(
2066
- self,
2067
- call: GenericCall,
2068
- keypair: Keypair,
2069
- era: Optional[dict] = None,
2070
- nonce: Optional[int] = None,
2071
- tip: int = 0,
2072
- tip_asset_id: Optional[int] = None,
2073
- signature: Optional[Union[bytes, str]] = None,
2074
- ) -> "GenericExtrinsic":
2075
- """
2076
- Creates an extrinsic signed by given account details
2077
-
2078
- :param call: GenericCall to create extrinsic for
2079
- :param keypair: Keypair used to sign the extrinsic
2080
- :param era: Specify mortality in blocks in follow format:
2081
- {'period': [amount_blocks]} If omitted the extrinsic is immortal
2082
- :param nonce: nonce to include in extrinsics, if omitted the current nonce is retrieved on-chain
2083
- :param tip: The tip for the block author to gain priority during network congestion
2084
- :param tip_asset_id: Optional asset ID with which to pay the tip
2085
- :param signature: Optionally provide signature if externally signed
2086
-
2087
- :return: The signed Extrinsic
2088
- """
2089
- if not self.metadata:
2090
- await self.init_runtime()
2091
-
2092
- # Check requirements
2093
- if not isinstance(call, GenericCall):
2094
- raise TypeError("'call' must be of type Call")
2095
-
2096
- # Check if extrinsic version is supported
2097
- if self.metadata[1][1]["extrinsic"]["version"] != 4: # type: ignore
2098
- raise NotImplementedError(
2099
- f"Extrinsic version {self.metadata[1][1]['extrinsic']['version']} not supported" # type: ignore
2100
- )
2101
-
2102
- # Retrieve nonce
2103
- if nonce is None:
2104
- nonce = await self.get_account_nonce(keypair.ss58_address) or 0
2105
-
2106
- # Process era
2107
- if era is None:
2108
- era = "00"
2109
- else:
2110
- if isinstance(era, dict) and "current" not in era and "phase" not in era:
2111
- # Retrieve current block id
2112
- era["current"] = await self.get_block_number(
2113
- await self.get_chain_finalised_head()
2114
- )
2115
-
2116
- if signature is not None:
2117
- if isinstance(signature, str) and signature[0:2] == "0x":
2118
- signature = bytes.fromhex(signature[2:])
2119
-
2120
- # Check if signature is a MultiSignature and contains signature version
2121
- if len(signature) == 65:
2122
- signature_version = signature[0]
2123
- signature = signature[1:]
2124
- else:
2125
- signature_version = keypair.crypto_type
2126
-
2127
- else:
2128
- # Create signature payload
2129
- signature_payload = await self.generate_signature_payload(
2130
- call=call, era=era, nonce=nonce, tip=tip, tip_asset_id=tip_asset_id
2131
- )
2132
-
2133
- # Set Signature version to crypto type of keypair
2134
- signature_version = keypair.crypto_type
2135
-
2136
- # Sign payload
2137
- signature = keypair.sign(signature_payload)
2138
-
2139
- # Create extrinsic
2140
- extrinsic = self.runtime_config.create_scale_object(
2141
- type_string="Extrinsic", metadata=self.metadata
2142
- )
2143
- value = {
2144
- "account_id": f"0x{keypair.public_key.hex()}",
2145
- "signature": f"0x{signature.hex()}",
2146
- "call_function": call.value["call_function"],
2147
- "call_module": call.value["call_module"],
2148
- "call_args": call.value["call_args"],
2149
- "nonce": nonce,
2150
- "era": era,
2151
- "tip": tip,
2152
- "asset_id": {"tip": tip, "asset_id": tip_asset_id},
2153
- "mode": "Disabled",
2154
- }
2155
-
2156
- # Check if ExtrinsicSignature is MultiSignature, otherwise omit signature_version
2157
- signature_cls = self.runtime_config.get_decoder_class("ExtrinsicSignature")
2158
- if issubclass(signature_cls, self.runtime_config.get_decoder_class("Enum")):
2159
- value["signature_version"] = signature_version
2160
- extrinsic.encode(value)
2161
- return extrinsic
2162
-
2163
- async def get_chain_finalised_head(self):
2164
- """
2165
- A pass-though to existing JSONRPC method `chain_getFinalizedHead`
2166
-
2167
- Returns
2168
- -------
2169
-
2170
- """
2171
- response = await self.rpc_request("chain_getFinalizedHead", [])
2172
-
2173
- if response is not None:
2174
- if "error" in response:
2175
- raise SubstrateRequestException(response["error"]["message"])
2176
-
2177
- return response.get("result")
2178
-
2179
- async def runtime_call(
2180
- self,
2181
- api: str,
2182
- method: str,
2183
- params: Optional[Union[list, dict]] = None,
2184
- block_hash: Optional[str] = None,
2185
- ) -> ScaleType:
2186
- """
2187
- Calls a runtime API method
2188
-
2189
- :param api: Name of the runtime API e.g. 'TransactionPaymentApi'
2190
- :param method: Name of the method e.g. 'query_fee_details'
2191
- :param params: List of parameters needed to call the runtime API
2192
- :param block_hash: Hash of the block at which to make the runtime API call
2193
-
2194
- :return: ScaleType from the runtime call
2195
- """
2196
- await self.init_runtime()
2197
-
2198
- if params is None:
2199
- params = {}
2200
-
2201
- try:
2202
- runtime_call_def = self.runtime_config.type_registry["runtime_api"][api][
2203
- "methods"
2204
- ][method]
2205
- runtime_api_types = self.runtime_config.type_registry["runtime_api"][
2206
- api
2207
- ].get("types", {})
2208
- except KeyError:
2209
- raise ValueError(f"Runtime API Call '{api}.{method}' not found in registry")
2210
-
2211
- if isinstance(params, list) and len(params) != len(runtime_call_def["params"]):
2212
- raise ValueError(
2213
- f"Number of parameter provided ({len(params)}) does not "
2214
- f"match definition {len(runtime_call_def['params'])}"
2215
- )
2216
-
2217
- # Add runtime API types to registry
2218
- self.runtime_config.update_type_registry_types(runtime_api_types)
2219
- runtime = Runtime(
2220
- self.chain,
2221
- self.runtime_config,
2222
- self.metadata,
2223
- self.type_registry,
2224
- )
2225
-
2226
- # Encode params
2227
- param_data = ScaleBytes(bytes())
2228
- for idx, param in enumerate(runtime_call_def["params"]):
2229
- scale_obj = runtime.runtime_config.create_scale_object(param["type"])
2230
- if isinstance(params, list):
2231
- param_data += scale_obj.encode(params[idx])
2232
- else:
2233
- if param["name"] not in params:
2234
- raise ValueError(f"Runtime Call param '{param['name']}' is missing")
2235
-
2236
- param_data += scale_obj.encode(params[param["name"]])
2237
-
2238
- # RPC request
2239
- result_data = await self.rpc_request(
2240
- "state_call", [f"{api}_{method}", str(param_data), block_hash]
2241
- )
2242
-
2243
- # Decode result
2244
- # TODO update this to use bt-decode
2245
- result_obj = runtime.runtime_config.create_scale_object(
2246
- runtime_call_def["type"]
2247
- )
2248
- result_obj.decode(
2249
- ScaleBytes(result_data["result"]),
2250
- check_remaining=self.config.get("strict_scale_decode"),
2251
- )
2252
-
2253
- return result_obj
2254
-
2255
- async def get_account_nonce(self, account_address: str) -> int:
2256
- """
2257
- Returns current nonce for given account address
2258
-
2259
- :param account_address: SS58 formatted address
2260
-
2261
- :return: Nonce for given account address
2262
- """
2263
- nonce_obj = await self.runtime_call(
2264
- "AccountNonceApi", "account_nonce", [account_address]
2265
- )
2266
- return nonce_obj.value
2267
-
2268
- async def get_metadata_constant(self, module_name, constant_name, block_hash=None):
2269
- """
2270
- Retrieves the details of a constant for given module name, call function name and block_hash
2271
- (or chaintip if block_hash is omitted)
2272
-
2273
- Parameters
2274
- ----------
2275
- module_name
2276
- constant_name
2277
- block_hash
2278
-
2279
- Returns
2280
- -------
2281
- MetadataModuleConstants
2282
- """
2283
-
2284
- await self.init_runtime(block_hash=block_hash)
2285
-
2286
- for module in self.metadata.pallets:
2287
- if module_name == module.name and module.constants:
2288
- for constant in module.constants:
2289
- if constant_name == constant.value["name"]:
2290
- return constant
2291
-
2292
- async def get_constant(
2293
- self,
2294
- module_name: str,
2295
- constant_name: str,
2296
- block_hash: Optional[str] = None,
2297
- reuse_block_hash: bool = False,
2298
- ) -> Optional["ScaleType"]:
2299
- """
2300
- Returns the decoded `ScaleType` object of the constant for given module name, call function name and block_hash
2301
- (or chaintip if block_hash is omitted)
2302
-
2303
- Parameters
2304
- ----------
2305
- :param module_name: Name of the module to query
2306
- :param constant_name: Name of the constant to query
2307
- :param block_hash: Hash of the block at which to make the runtime API call
2308
- :param reuse_block_hash: Reuse last-used block hash if set to true
2309
-
2310
- :return: ScaleType from the runtime call
2311
- """
2312
- block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash)
2313
- constant = await self.get_metadata_constant(
2314
- module_name, constant_name, block_hash=block_hash
2315
- )
2316
- if constant:
2317
- # Decode to ScaleType
2318
- return await self.decode_scale(
2319
- constant.type,
2320
- bytes(constant.constant_value),
2321
- return_scale_obj=True,
2322
- )
2323
- else:
2324
- return None
2325
-
2326
- async def get_payment_info(
2327
- self, call: GenericCall, keypair: Keypair
2328
- ) -> dict[str, Any]:
2329
- """
2330
- Retrieves fee estimation via RPC for given extrinsic
2331
-
2332
- Parameters
2333
- ----------
2334
- call: Call object to estimate fees for
2335
- keypair: Keypair of the sender, does not have to include private key because no valid signature is required
2336
-
2337
- Returns
2338
- -------
2339
- Dict with payment info
2340
-
2341
- E.g. `{'class': 'normal', 'partialFee': 151000000, 'weight': {'ref_time': 143322000}}`
2342
-
2343
- """
2344
-
2345
- # Check requirements
2346
- if not isinstance(call, GenericCall):
2347
- raise TypeError("'call' must be of type Call")
2348
-
2349
- if not isinstance(keypair, Keypair):
2350
- raise TypeError("'keypair' must be of type Keypair")
2351
-
2352
- # No valid signature is required for fee estimation
2353
- signature = "0x" + "00" * 64
2354
-
2355
- # Create extrinsic
2356
- extrinsic = await self.create_signed_extrinsic(
2357
- call=call, keypair=keypair, signature=signature
2358
- )
2359
- extrinsic_len = self.runtime_config.create_scale_object("u32")
2360
- extrinsic_len.encode(len(extrinsic.data))
2361
-
2362
- result = await self.runtime_call(
2363
- "TransactionPaymentApi", "query_info", [extrinsic, extrinsic_len]
2364
- )
2365
-
2366
- return result.value
2367
-
2368
- async def query(
2369
- self,
2370
- module: str,
2371
- storage_function: str,
2372
- params: Optional[list] = None,
2373
- block_hash: Optional[str] = None,
2374
- raw_storage_key: Optional[bytes] = None,
2375
- subscription_handler=None,
2376
- reuse_block_hash: bool = False,
2377
- ) -> "ScaleType":
2378
- """
2379
- Queries subtensor. This should only be used when making a single request. For multiple requests,
2380
- you should use ``self.query_multiple``
2381
- """
2382
- block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash)
2383
- if block_hash:
2384
- self.last_block_hash = block_hash
2385
- runtime = await self.init_runtime(block_hash=block_hash)
2386
- preprocessed: Preprocessed = await self._preprocess(
2387
- params, block_hash, storage_function, module
2388
- )
2389
- payload = [
2390
- self.make_payload(
2391
- preprocessed.queryable, preprocessed.method, preprocessed.params
2392
- )
2393
- ]
2394
- value_scale_type = preprocessed.value_scale_type
2395
- storage_item = preprocessed.storage_item
2396
-
2397
- responses = await self._make_rpc_request(
2398
- payload,
2399
- value_scale_type,
2400
- storage_item,
2401
- runtime,
2402
- result_handler=subscription_handler,
2403
- )
2404
- return responses[preprocessed.queryable][0]
2405
-
2406
- async def query_map(
2407
- self,
2408
- module: str,
2409
- storage_function: str,
2410
- params: Optional[list] = None,
2411
- block_hash: Optional[str] = None,
2412
- max_results: Optional[int] = None,
2413
- start_key: Optional[str] = None,
2414
- page_size: int = 100,
2415
- ignore_decoding_errors: bool = False,
2416
- reuse_block_hash: bool = False,
2417
- ) -> "QueryMapResult":
2418
- """
2419
- Iterates over all key-pairs located at the given module and storage_function. The storage
2420
- item must be a map.
2421
-
2422
- Example:
2423
-
2424
- ```
2425
- result = await substrate.query_map('System', 'Account', max_results=100)
2426
-
2427
- async for account, account_info in result:
2428
- print(f"Free balance of account '{account.value}': {account_info.value['data']['free']}")
2429
- ```
2430
-
2431
- Note: it is important that you do not use `for x in result.records`, as this will sidestep possible
2432
- pagination. You must do `async for x in result`.
2433
-
2434
- :param module: The module name in the metadata, e.g. System or Balances.
2435
- :param storage_function: The storage function name, e.g. Account or Locks.
2436
- :param params: The input parameters in case of for example a `DoubleMap` storage function
2437
- :param block_hash: Optional block hash for result at given block, when left to None the chain tip will be used.
2438
- :param max_results: the maximum of results required, if set the query will stop fetching results when number is
2439
- reached
2440
- :param start_key: The storage key used as offset for the results, for pagination purposes
2441
- :param page_size: The results are fetched from the node RPC in chunks of this size
2442
- :param ignore_decoding_errors: When set this will catch all decoding errors, set the item to None and continue
2443
- decoding
2444
- :param reuse_block_hash: use True if you wish to make the query using the last-used block hash. Do not mark True
2445
- if supplying a block_hash
2446
-
2447
- :return: QueryMapResult object
2448
- """
2449
- params = params or []
2450
- block_hash = await self._get_current_block_hash(block_hash, reuse_block_hash)
2451
- if block_hash:
2452
- self.last_block_hash = block_hash
2453
- runtime = await self.init_runtime(block_hash=block_hash)
2454
-
2455
- metadata_pallet = runtime.metadata.get_metadata_pallet(module)
2456
- if not metadata_pallet:
2457
- raise ValueError(f'Pallet "{module}" not found')
2458
- storage_item = metadata_pallet.get_storage_function(storage_function)
2459
-
2460
- if not metadata_pallet or not storage_item:
2461
- raise ValueError(
2462
- f'Storage function "{module}.{storage_function}" not found'
2463
- )
2464
-
2465
- value_type = storage_item.get_value_type_string()
2466
- param_types = storage_item.get_params_type_string()
2467
- key_hashers = storage_item.get_param_hashers()
2468
-
2469
- # Check MapType conditions
2470
- if len(param_types) == 0:
2471
- raise ValueError("Given storage function is not a map")
2472
- if len(params) > len(param_types) - 1:
2473
- raise ValueError(
2474
- f"Storage function map can accept max {len(param_types) - 1} parameters, {len(params)} given"
2475
- )
2476
-
2477
- # Generate storage key prefix
2478
- storage_key = StorageKey.create_from_storage_function(
2479
- module,
2480
- storage_item.value["name"],
2481
- params,
2482
- runtime_config=runtime.runtime_config,
2483
- metadata=runtime.metadata,
2484
- )
2485
- prefix = storage_key.to_hex()
2486
-
2487
- if not start_key:
2488
- start_key = prefix
2489
-
2490
- # Make sure if the max result is smaller than the page size, adjust the page size
2491
- if max_results is not None and max_results < page_size:
2492
- page_size = max_results
2493
-
2494
- # Retrieve storage keys
2495
- response = await self.rpc_request(
2496
- method="state_getKeysPaged",
2497
- params=[prefix, page_size, start_key, block_hash],
2498
- )
2499
-
2500
- if "error" in response:
2501
- raise SubstrateRequestException(response["error"]["message"])
2502
-
2503
- result_keys = response.get("result")
2504
-
2505
- result = []
2506
- last_key = None
2507
-
2508
- def concat_hash_len(key_hasher: str) -> int:
2509
- """
2510
- Helper function to avoid if statements
2511
- """
2512
- if key_hasher == "Blake2_128Concat":
2513
- return 16
2514
- elif key_hasher == "Twox64Concat":
2515
- return 8
2516
- elif key_hasher == "Identity":
2517
- return 0
2518
- else:
2519
- raise ValueError("Unsupported hash type")
2520
-
2521
- if len(result_keys) > 0:
2522
- last_key = result_keys[-1]
2523
-
2524
- # Retrieve corresponding value
2525
- response = await self.rpc_request(
2526
- method="state_queryStorageAt", params=[result_keys, block_hash]
2527
- )
2528
-
2529
- if "error" in response:
2530
- raise SubstrateRequestException(response["error"]["message"])
2531
-
2532
- for result_group in response["result"]:
2533
- for item in result_group["changes"]:
2534
- try:
2535
- # Determine type string
2536
- key_type_string = []
2537
- for n in range(len(params), len(param_types)):
2538
- key_type_string.append(
2539
- f"[u8; {concat_hash_len(key_hashers[n])}]"
2540
- )
2541
- key_type_string.append(param_types[n])
2542
-
2543
- item_key_obj = await self.decode_scale(
2544
- type_string=f"({', '.join(key_type_string)})",
2545
- scale_bytes=bytes.fromhex(item[0][len(prefix) :]),
2546
- return_scale_obj=True,
2547
- )
2548
-
2549
- # strip key_hashers to use as item key
2550
- if len(param_types) - len(params) == 1:
2551
- item_key = item_key_obj[1]
2552
- else:
2553
- item_key = tuple(
2554
- item_key_obj[key + 1]
2555
- for key in range(len(params), len(param_types) + 1, 2)
2556
- )
2557
-
2558
- except Exception as _:
2559
- if not ignore_decoding_errors:
2560
- raise
2561
- item_key = None
2562
-
2563
- try:
2564
- item_bytes = hex_to_bytes(item[1])
2565
-
2566
- item_value = await self.decode_scale(
2567
- type_string=value_type,
2568
- scale_bytes=item_bytes,
2569
- return_scale_obj=True,
2570
- )
2571
- except Exception as _:
2572
- if not ignore_decoding_errors:
2573
- raise
2574
- item_value = None
2575
-
2576
- result.append([item_key, item_value])
2577
-
2578
- return QueryMapResult(
2579
- records=result,
2580
- page_size=page_size,
2581
- module=module,
2582
- storage_function=storage_function,
2583
- params=params,
2584
- block_hash=block_hash,
2585
- substrate=self,
2586
- last_key=last_key,
2587
- max_results=max_results,
2588
- ignore_decoding_errors=ignore_decoding_errors,
2589
- )
2590
-
2591
- async def submit_extrinsic(
2592
- self,
2593
- extrinsic: GenericExtrinsic,
2594
- wait_for_inclusion: bool = False,
2595
- wait_for_finalization: bool = False,
2596
- ) -> "ExtrinsicReceipt":
2597
- """
2598
- Submit an extrinsic to the connected node, with the possibility to wait until the extrinsic is included
2599
- in a block and/or the block is finalized. The receipt returned provided information about the block and
2600
- triggered events
2601
-
2602
- Parameters
2603
- ----------
2604
- extrinsic: Extrinsic The extrinsic to be sent to the network
2605
- wait_for_inclusion: wait until extrinsic is included in a block (only works for websocket connections)
2606
- wait_for_finalization: wait until extrinsic is finalized (only works for websocket connections)
2607
-
2608
- Returns
2609
- -------
2610
- ExtrinsicReceipt
2611
-
2612
- """
2613
-
2614
- # Check requirements
2615
- if not isinstance(extrinsic, GenericExtrinsic):
2616
- raise TypeError("'extrinsic' must be of type Extrinsics")
2617
-
2618
- async def result_handler(message: dict, subscription_id) -> tuple[dict, bool]:
2619
- """
2620
- Result handler function passed as an arg to _make_rpc_request as the result_handler
2621
- to handle the results of the extrinsic rpc call, which are multipart, and require
2622
- subscribing to the message
2623
-
2624
- :param message: message received from the rpc call
2625
- :param subscription_id: subscription id received from the initial rpc call for the subscription
2626
-
2627
- :returns: tuple containing the dict of the block info for the subscription, and bool for whether
2628
- the subscription is completed.
2629
- """
2630
- # Check if extrinsic is included and finalized
2631
- if "params" in message and isinstance(message["params"]["result"], dict):
2632
- # Convert result enum to lower for backwards compatibility
2633
- message_result = {
2634
- k.lower(): v for k, v in message["params"]["result"].items()
2635
- }
2636
-
2637
- if "finalized" in message_result and wait_for_finalization:
2638
- # Created as a task because we don't actually care about the result
2639
- self._forgettable_task = asyncio.create_task(
2640
- self.rpc_request("author_unwatchExtrinsic", [subscription_id])
2641
- )
2642
- return {
2643
- "block_hash": message_result["finalized"],
2644
- "extrinsic_hash": "0x{}".format(extrinsic.extrinsic_hash.hex()),
2645
- "finalized": True,
2646
- }, True
2647
- elif (
2648
- "inblock" in message_result
2649
- and wait_for_inclusion
2650
- and not wait_for_finalization
2651
- ):
2652
- # Created as a task because we don't actually care about the result
2653
- self._forgettable_task = asyncio.create_task(
2654
- self.rpc_request("author_unwatchExtrinsic", [subscription_id])
2655
- )
2656
- return {
2657
- "block_hash": message_result["inblock"],
2658
- "extrinsic_hash": "0x{}".format(extrinsic.extrinsic_hash.hex()),
2659
- "finalized": False,
2660
- }, True
2661
- return message, False
2662
-
2663
- if wait_for_inclusion or wait_for_finalization:
2664
- responses = (
2665
- await self._make_rpc_request(
2666
- [
2667
- self.make_payload(
2668
- "rpc_request",
2669
- "author_submitAndWatchExtrinsic",
2670
- [str(extrinsic.data)],
2671
- )
2672
- ],
2673
- result_handler=result_handler,
2674
- )
2675
- )["rpc_request"]
2676
- response = next(
2677
- (r for r in responses if "block_hash" in r and "extrinsic_hash" in r),
2678
- None,
2679
- )
2680
-
2681
- if not response:
2682
- raise SubstrateRequestException(responses)
2683
-
2684
- # Also, this will be a multipart response, so maybe should change to everything after the first response?
2685
- # The following code implies this will be a single response after the initial subscription id.
2686
- result = ExtrinsicReceipt(
2687
- substrate=self,
2688
- extrinsic_hash=response["extrinsic_hash"],
2689
- block_hash=response["block_hash"],
2690
- finalized=response["finalized"],
2691
- )
2692
-
2693
- else:
2694
- response = await self.rpc_request(
2695
- "author_submitExtrinsic", [str(extrinsic.data)]
2696
- )
2697
-
2698
- if "result" not in response:
2699
- raise SubstrateRequestException(response.get("error"))
2700
-
2701
- result = ExtrinsicReceipt(substrate=self, extrinsic_hash=response["result"])
2702
-
2703
- return result
2704
-
2705
- async def get_metadata_call_function(
2706
- self,
2707
- module_name: str,
2708
- call_function_name: str,
2709
- block_hash: Optional[str] = None,
2710
- ) -> Optional[list]:
2711
- """
2712
- Retrieves a list of all call functions in metadata active for given block_hash (or chaintip if block_hash
2713
- is omitted)
2714
-
2715
- :param module_name: name of the module
2716
- :param call_function_name: name of the call function
2717
- :param block_hash: optional block hash
2718
-
2719
- :return: list of call functions
2720
- """
2721
- runtime = await self.init_runtime(block_hash=block_hash)
2722
-
2723
- for pallet in runtime.metadata.pallets:
2724
- if pallet.name == module_name and pallet.calls:
2725
- for call in pallet.calls:
2726
- if call.name == call_function_name:
2727
- return call
2728
- return None
2729
-
2730
- async def get_block_number(self, block_hash: Optional[str]) -> int:
2731
- """Async version of `substrateinterface.base.get_block_number` method."""
2732
- response = await self.rpc_request("chain_getHeader", [block_hash])
2733
-
2734
- if "error" in response:
2735
- raise SubstrateRequestException(response["error"]["message"])
2736
-
2737
- elif "result" in response:
2738
- if response["result"]:
2739
- return int(response["result"]["number"], 16)
2740
-
2741
- async def close(self):
2742
- """
2743
- Closes the substrate connection, and the websocket connection.
2744
- """
2745
- try:
2746
- await self.ws.shutdown()
2747
- except AttributeError:
2748
- pass