hive-nectar 0.2.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hive_nectar-0.2.9.dist-info/METADATA +194 -0
- hive_nectar-0.2.9.dist-info/RECORD +87 -0
- hive_nectar-0.2.9.dist-info/WHEEL +4 -0
- hive_nectar-0.2.9.dist-info/entry_points.txt +2 -0
- hive_nectar-0.2.9.dist-info/licenses/LICENSE.txt +23 -0
- nectar/__init__.py +37 -0
- nectar/account.py +5076 -0
- nectar/amount.py +553 -0
- nectar/asciichart.py +303 -0
- nectar/asset.py +122 -0
- nectar/block.py +574 -0
- nectar/blockchain.py +1242 -0
- nectar/blockchaininstance.py +2590 -0
- nectar/blockchainobject.py +263 -0
- nectar/cli.py +5937 -0
- nectar/comment.py +1552 -0
- nectar/community.py +854 -0
- nectar/constants.py +95 -0
- nectar/discussions.py +1437 -0
- nectar/exceptions.py +152 -0
- nectar/haf.py +381 -0
- nectar/hive.py +630 -0
- nectar/imageuploader.py +114 -0
- nectar/instance.py +113 -0
- nectar/market.py +876 -0
- nectar/memo.py +542 -0
- nectar/message.py +379 -0
- nectar/nodelist.py +309 -0
- nectar/price.py +603 -0
- nectar/profile.py +74 -0
- nectar/py.typed +0 -0
- nectar/rc.py +333 -0
- nectar/snapshot.py +1024 -0
- nectar/storage.py +62 -0
- nectar/transactionbuilder.py +659 -0
- nectar/utils.py +630 -0
- nectar/version.py +3 -0
- nectar/vote.py +722 -0
- nectar/wallet.py +472 -0
- nectar/witness.py +728 -0
- nectarapi/__init__.py +12 -0
- nectarapi/exceptions.py +126 -0
- nectarapi/graphenerpc.py +596 -0
- nectarapi/node.py +194 -0
- nectarapi/noderpc.py +79 -0
- nectarapi/openapi.py +107 -0
- nectarapi/py.typed +0 -0
- nectarapi/rpcutils.py +98 -0
- nectarapi/version.py +3 -0
- nectarbase/__init__.py +15 -0
- nectarbase/ledgertransactions.py +106 -0
- nectarbase/memo.py +242 -0
- nectarbase/objects.py +521 -0
- nectarbase/objecttypes.py +21 -0
- nectarbase/operationids.py +102 -0
- nectarbase/operations.py +1357 -0
- nectarbase/py.typed +0 -0
- nectarbase/signedtransactions.py +89 -0
- nectarbase/transactions.py +11 -0
- nectarbase/version.py +3 -0
- nectargraphenebase/__init__.py +27 -0
- nectargraphenebase/account.py +1121 -0
- nectargraphenebase/aes.py +49 -0
- nectargraphenebase/base58.py +197 -0
- nectargraphenebase/bip32.py +575 -0
- nectargraphenebase/bip38.py +110 -0
- nectargraphenebase/chains.py +15 -0
- nectargraphenebase/dictionary.py +2 -0
- nectargraphenebase/ecdsasig.py +309 -0
- nectargraphenebase/objects.py +130 -0
- nectargraphenebase/objecttypes.py +8 -0
- nectargraphenebase/operationids.py +5 -0
- nectargraphenebase/operations.py +25 -0
- nectargraphenebase/prefix.py +13 -0
- nectargraphenebase/py.typed +0 -0
- nectargraphenebase/signedtransactions.py +221 -0
- nectargraphenebase/types.py +557 -0
- nectargraphenebase/unsignedtransactions.py +288 -0
- nectargraphenebase/version.py +3 -0
- nectarstorage/__init__.py +57 -0
- nectarstorage/base.py +317 -0
- nectarstorage/exceptions.py +15 -0
- nectarstorage/interfaces.py +244 -0
- nectarstorage/masterpassword.py +237 -0
- nectarstorage/py.typed +0 -0
- nectarstorage/ram.py +27 -0
- nectarstorage/sqlite.py +343 -0
@@ -0,0 +1,2590 @@
from __future__ import annotations

import ast
import json
import logging
import math
import os
import re
from datetime import datetime, timezone
from typing import Any

from nectar.amount import Amount
from nectar.constants import (
    CURVE_CONSTANT,
    CURVE_CONSTANT_X4,
    HIVE_1_PERCENT,
    HIVE_100_PERCENT,
    HIVE_RC_REGEN_TIME,
    HIVE_VOTE_REGENERATION_SECONDS,
    SQUARED_CURVE_CONSTANT,
)
from nectarapi.noderpc import NodeRPC
from nectarbase import operations
from nectargraphenebase.account import PrivateKey, PublicKey
from nectargraphenebase.chains import known_chains

from .account import Account
from .exceptions import AccountDoesNotExistsException, AccountExistsException
from .price import Price
from .storage import get_default_config_store
from .transactionbuilder import TransactionBuilder
from .utils import (
    derive_permlink,
    remove_from_dict,
    resolve_authorperm,
    sanitize_permlink,
)
from .version import version as nectar_version
from .wallet import Wallet

log = logging.getLogger(__name__)

RPC_NOT_ESTABLISHED = "RPC connection not established"


class BlockChainInstance:
    """Connect to a Graphene network.

    :param str node: Node to connect to *(optional)*
    :param str rpcuser: RPC user *(optional)*
    :param str rpcpassword: RPC password *(optional)*
    :param bool nobroadcast: Do **not** broadcast a transaction!
        *(optional)*
    :param bool unsigned: Do **not** sign a transaction! *(optional)*
    :param bool debug: Enable Debugging *(optional)*
    :param keys: Predefine the wif keys to shortcut the
        wallet database *(optional)*
    :type keys: array, dict, string
    :param wif: Predefine the wif keys to shortcut the
        wallet database *(optional)*
    :type wif: array, dict, string
    :param bool offline: Boolean to prevent connecting to network (defaults
        to ``False``) *(optional)*
    :param int expiration: Delay in seconds until transactions are supposed
        to expire *(optional)* (default is 300)
    :param str blocking: Wait for broadcasted transactions to be included
        in a block and return full transaction (can be "head" or
        "irreversible")
    :param bool bundle: Do not broadcast transactions right away, but allow
        to bundle operations. It is not possible to send out more than one
        vote operation and more than one comment operation in a single broadcast *(optional)*
    :param dict custom_chains: custom chain which should be added to the known chains

    Three wallet operation modes are possible:

    * **Wallet Database**: Here, the nectar libraries load the keys from the
      locally stored wallet SQLite database (see ``storage.py``).
      To use this mode, simply call ``Hive()`` without the
      ``keys`` parameter
    * **Providing Keys**: Here, you can provide the keys for
      your accounts manually. All you need to do is add the wif
      keys for the accounts you want to use as a simple array
      using the ``keys`` parameter to ``Hive()``.
    * **Force keys**: This mode is for advanced users and
      requires that you know what you are doing. Here, the
      ``keys`` parameter is a dictionary that overwrites the
      ``active``, ``owner``, ``posting`` or ``memo`` keys for
      any account. This mode is only used for *foreign*
      signatures!

    If no node is provided, it will connect to the default Hive nodes.
    Default settings can be changed with:

    .. code-block:: python

        hive = Hive(<host>)

    where ``<host>`` starts with ``https://``, ``ws://`` or ``wss://``.

    The purpose of this class is to simplify interaction with
    Hive.

    The idea is to have a class that allows to do this:

    .. code-block:: python

        >>> from nectar import Hive
        >>> hive = Hive()
        >>> print(hive.get_blockchain_version())  # doctest: +SKIP

    This class also deals with edits, votes and reading content.

    Example for adding a custom chain:

    .. code-block:: python

        from nectar import Hive
        hv = Hive(node=["https://mytstnet.com"], custom_chains={"MYTESTNET":
            {'chain_assets': [{'asset': 'HBD', 'id': 0, 'precision': 3, 'symbol': 'HBD'},
                              {'asset': 'HIVE', 'id': 1, 'precision': 3, 'symbol': 'HIVE'},
                              {'asset': 'VESTS', 'id': 2, 'precision': 6, 'symbol': 'VESTS'}],
             'chain_id': '79276aea5d4877d9a25892eaa01b0adf019d3e5cb12a97478df3298ccdd01674',
             'min_version': '0.0.0',
             'prefix': 'MTN'}
            }
        )

    """

    def __init__(
        self,
        node: str | list[str] | None = None,
        rpcuser: str | None = None,
        rpcpassword: str | None = None,
        debug: bool = False,
        data_refresh_time_seconds: int = 900,
        **kwargs,
    ) -> None:
        """
        Initialize the BlockChainInstance, set up connection (unless offline), load configuration, initialize caches and transaction buffers, and create the Wallet and optional HiveSigner/ledger signing support.

        Parameters:
            node (str): RPC node URL to connect to (optional; ignored if offline).
            rpcuser (str), rpcpassword (str): Optional RPC credentials for the node.
            data_refresh_time_seconds (int): Default cache refresh interval in seconds.
            debug (bool): Enable debug mode.
            **kwargs: Additional options (commonly used keys)
                - offline (bool): If True, skip connecting to a node.
                - nobroadcast (bool): If True, do not broadcast transactions.
                - unsigned (bool): If True, do not sign transactions.
                - expiration (int): Transaction expiration delay in seconds.
                - bundle (bool): If True, enable bundling of operations instead of immediate broadcast.
                - blocking (str|bool): Wait mode for broadcasts ("head" or "irreversible").
                - custom_chains (dict): Custom chain definitions.
                - use_ledger (bool): If True, enable Ledger Nano signing.
                - path (str): BIP32 path to derive pubkey from when using Ledger.
                - config_store: Configuration store object (defaults to the global default).
        """

        self.rpc = None
        self.debug = debug

        self.offline = bool(kwargs.get("offline", False))
        self.nobroadcast = bool(kwargs.get("nobroadcast", False))
        self.unsigned = bool(kwargs.get("unsigned", False))
        # Default transaction expiration window (seconds). Increased from 30s to 300s for better tolerance to node clock skew/network latency.
        self.expiration = int(kwargs.get("expiration", 300))
        self.bundle = bool(kwargs.get("bundle", False))
        self.blocking = kwargs.get("blocking", False)
        self.custom_chains = kwargs.get("custom_chains", {})
        self.use_ledger = bool(kwargs.get("use_ledger", False))
        self.path = kwargs.get("path", None)

        # Store config for access through other Classes
        self.config = kwargs.get("config_store", get_default_config_store(**kwargs))
        if self.path is None:
            self.path = self.config["default_path"]

        if not self.offline:
            if node:
                # Type assertion: we know node is not None here
                assert node is not None
                self.connect(
                    node=node,
                    rpcuser=rpcuser or "",
                    rpcpassword=rpcpassword or "",
                    **kwargs,
                )
            else:
                self.connect(
                    node="",
                    rpcuser=rpcuser or "",
                    rpcpassword=rpcpassword or "",
                    **kwargs,
                )

        self.clear_data()
        self.data_refresh_time_seconds = data_refresh_time_seconds
        # self.refresh_data()

        # txbuffers/propbuffer are initialized and cleared
        self.clear()

        self.wallet = Wallet(blockchain_instance=self, **kwargs)

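    # Illustrative usage sketch (not part of the original module): constructing
    # instances with the keyword options handled above. The node URL and the
    # placeholder WIF are assumptions, not values from this package.
    #
    #     from nectar import Hive
    #
    #     hv = Hive(node="https://api.hive.blog", nobroadcast=True)  # dry-run: signs but never broadcasts
    #     hv_offline = Hive(offline=True)                            # no RPC connection is attempted
    #     hv_keys = Hive(keys=["5J..."], expiration=120)             # keys provided directly, wallet DB bypassed
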
    # -------------------------------------------------------------------------
    # Basic Calls
    # -------------------------------------------------------------------------
    def connect(
        self,
        node: str | list[str] = "",
        rpcuser: str = "",
        rpcpassword: str = "",
        **kwargs,
    ) -> None:
        """
        Connect to a Hive node and initialize the internal RPC client.

        If node is empty, the method will attempt to use the configured default nodes; if none are available a ValueError is raised.
        If rpcuser or rpcpassword are not provided, values are read from self.config when present. The config key "use_tor" (if set) will be used to enable Tor for the connection.
        Any additional keyword arguments are forwarded to the NodeRPC constructor.

        Parameters:
            node (str | list): Node URL or list of node URLs to connect to. If omitted, default nodes are used.
            rpcuser (str): Optional RPC username; falls back to self.config["rpcuser"] when not supplied.
            rpcpassword (str): Optional RPC password; falls back to self.config["rpcpassword"] when not supplied.

        Raises:
            ValueError: If no node is provided and no default nodes are configured.
        """
        if not node:
            node = self.get_default_nodes()
            if not bool(node):
                raise ValueError("A Hive node needs to be provided!")

        if not rpcuser and "rpcuser" in self.config:
            rpcuser = self.config["rpcuser"]

        if not rpcpassword and "rpcpassword" in self.config:
            rpcpassword = self.config["rpcpassword"]

        if "use_tor" in self.config:
            use_tor = self.config["use_tor"]
        else:
            use_tor = False

        self.rpc = NodeRPC(node, rpcuser, rpcpassword, use_tor=use_tor, **kwargs)

    def is_connected(self) -> bool:
        """Returns whether the RPC client is connected."""
        # Consider the instance connected only if an RPC client exists AND
        # it has an active URL set by rpcconnect(). Previously, this returned
        # True when self.rpc was merely instantiated but without a selected
        # working node (i.e., self.rpc.url was None), which caused downstream
        # RPC calls to raise RPCConnection("RPC is not connected!").
        return self.rpc is not None and bool(getattr(self.rpc, "url", None))

    def __repr__(self) -> str:
        if self.offline:
            return "<%s offline=True>" % (self.__class__.__name__)
        elif self.rpc is not None and self.rpc.url and len(self.rpc.url) > 0:
            return "<{} node={}, nobroadcast={}>".format(
                self.__class__.__name__,
                str(self.rpc.url),
                str(self.nobroadcast),
            )
        else:
            return "<{}, nobroadcast={}>".format(self.__class__.__name__, str(self.nobroadcast))

    def clear_data(self) -> None:
        """
        Reset the internal cache of blockchain-derived data.

        This clears stored values used to cache node-dependent blockchain parameters (dynamic global properties, feed history,
        hardfork properties, network info, witness schedule, config, reward funds) and their per-key refresh timestamps. It does
        not affect network connection, wallet state, transaction buffers, or other non-cache attributes.
        """
        self.data = {
            "last_refresh": None,
            "last_node": None,
            "last_refresh_dynamic_global_properties": None,
            "dynamic_global_properties": None,
            "feed_history": None,
            "get_feed_history": None,
            "last_refresh_feed_history": None,
            "hardfork_properties": None,
            "last_refresh_hardfork_properties": None,
            "network": None,
            "last_refresh_network": None,
            "witness_schedule": None,
            "last_refresh_witness_schedule": None,
            "config": None,
            "last_refresh_config": None,
            "reward_funds": None,
            "last_refresh_reward_funds": None,
        }

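    # Illustrative sketch (not part of the original module): checking the connection
    # state exposed by is_connected() and __repr__ above. Both node URLs are
    # assumptions (any public Hive API endpoint works).
    #
    #     hv = Hive(node="https://api.hive.blog")
    #     if hv.is_connected():
    #         print(hv)   # e.g. <Hive node=https://api.hive.blog, nobroadcast=False>
    #     else:
    #         hv.connect(node="https://api.deathwing.me")
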
    def refresh_data(
        self,
        chain_property: str,
        force_refresh: bool = False,
        data_refresh_time_seconds: int | None = None,
    ) -> None:
        """
        Refresh and cache a specific blockchain data category in self.data.

        This updates the cached value for the given chain_property (one of:
        "dynamic_global_properties", "feed_history", "hardfork_properties",
        "witness_schedule", "config", "reward_funds"). If the cached value was
        refreshed recently (within self.data_refresh_time_seconds) and force_refresh
        is False, the method will skip the RPC call. When online, timestamps
        (last_refresh_*) and last_node are updated to reflect the refresh.

        Parameters:
            chain_property (str): The cache key to refresh; must be one of the supported properties.
            force_refresh (bool): If True, bypass the time-based refresh guard and force an update.
            data_refresh_time_seconds (int | None): If provided, set a new minimal refresh interval
                (in seconds) before evaluating whether to skip refreshing.

        Raises:
            ValueError: If chain_property is not one of the supported keys.
        """
        # if self.offline:
        #     return
        if data_refresh_time_seconds is not None:
            self.data_refresh_time_seconds = data_refresh_time_seconds
        if chain_property == "dynamic_global_properties":
            if not self.offline:
                if (
                    self.data["last_refresh_dynamic_global_properties"] is not None
                    and not force_refresh
                    and self.rpc is not None
                    and self.data["last_node"] == self.rpc.url
                ):
                    if (
                        datetime.now(timezone.utc)
                        - self.data["last_refresh_dynamic_global_properties"]
                    ).total_seconds() < self.data_refresh_time_seconds:
                        return
                self.data["last_refresh_dynamic_global_properties"] = datetime.now(timezone.utc)
                self.data["last_refresh"] = datetime.now(timezone.utc)
                if self.rpc is not None:
                    self.data["last_node"] = self.rpc.url
                self.data["dynamic_global_properties"] = self.get_dynamic_global_properties(False)
        elif chain_property == "feed_history":
            if not self.offline:
                if (
                    self.data["last_refresh_feed_history"] is not None
                    and not force_refresh
                    and self.rpc is not None
                    and self.data["last_node"] == self.rpc.url
                ):
                    if (
                        datetime.now(timezone.utc) - self.data["last_refresh_feed_history"]
                    ).total_seconds() < self.data_refresh_time_seconds:
                        return

                self.data["last_refresh_feed_history"] = datetime.now(timezone.utc)
                self.data["last_refresh"] = datetime.now(timezone.utc)
                if self.rpc is not None:
                    self.data["last_node"] = self.rpc.url
                try:
                    self.data["feed_history"] = self.get_feed_history(False)
                except Exception:
                    self.data["feed_history"] = None
                self.data["get_feed_history"] = self.data["feed_history"]
        elif chain_property == "hardfork_properties":
            if not self.offline:
                if (
                    self.data["last_refresh_hardfork_properties"] is not None
                    and not force_refresh
                    and self.rpc is not None
                    and self.data["last_node"] == self.rpc.url
                ):
                    if (
                        datetime.now(timezone.utc) - self.data["last_refresh_hardfork_properties"]
                    ).total_seconds() < self.data_refresh_time_seconds:
                        return

                self.data["last_refresh_hardfork_properties"] = datetime.now(timezone.utc)
                self.data["last_refresh"] = datetime.now(timezone.utc)
                if self.rpc is not None:
                    self.data["last_node"] = self.rpc.url
                try:
                    self.data["hardfork_properties"] = self.get_hardfork_properties(False)
                except Exception:
                    self.data["hardfork_properties"] = None
        elif chain_property == "witness_schedule":
            if not self.offline:
                if (
                    self.data["last_refresh_witness_schedule"] is not None
                    and not force_refresh
                    and self.rpc is not None
                    and self.data["last_node"] == self.rpc.url
                ):
                    if (
                        datetime.now(timezone.utc) - self.data["last_refresh_witness_schedule"]
                    ).total_seconds() < 3:
                        return
                self.data["last_refresh_witness_schedule"] = datetime.now(timezone.utc)
                self.data["last_refresh"] = datetime.now(timezone.utc)
                if self.rpc is not None:
                    self.data["last_node"] = self.rpc.url
                self.data["witness_schedule"] = self.get_witness_schedule(False)
        elif chain_property == "config":
            if not self.offline:
                if (
                    self.data["last_refresh_config"] is not None
                    and not force_refresh
                    and self.rpc is not None
                    and self.data["last_node"] == self.rpc.url
                ):
                    if (
                        datetime.now(timezone.utc) - self.data["last_refresh_config"]
                    ).total_seconds() < self.data_refresh_time_seconds:
                        return
                self.data["last_refresh_config"] = datetime.now(timezone.utc)
                self.data["last_refresh"] = datetime.now(timezone.utc)
                if self.rpc is not None:
                    self.data["last_node"] = self.rpc.url
                self.data["config"] = self.get_config(False)
                self.data["network"] = self.get_network(False, config=self.data["config"])
        elif chain_property == "reward_funds":
            if not self.offline:
                if (
                    self.data["last_refresh_reward_funds"] is not None
                    and not force_refresh
                    and self.rpc is not None
                    and self.data["last_node"] == self.rpc.url
                ):
                    if (
                        datetime.now(timezone.utc) - self.data["last_refresh_reward_funds"]
                    ).total_seconds() < self.data_refresh_time_seconds:
                        return

                self.data["last_refresh_reward_funds"] = datetime.now(timezone.utc)
                self.data["last_refresh"] = datetime.now(timezone.utc)
                if self.rpc is not None:
                    self.data["last_node"] = self.rpc.url
                self.data["reward_funds"] = self.get_reward_funds(False)
        else:
            raise ValueError("%s is not a known chain property" % str(chain_property))

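    # Illustrative sketch (not part of the original module): the cache guard above
    # means repeated reads within data_refresh_time_seconds are served from memory,
    # while force_refresh bypasses it.
    #
    #     hv.refresh_data("dynamic_global_properties")                   # cached if still fresh
    #     hv.refresh_data("config", force_refresh=True)                  # always re-fetch
    #     hv.refresh_data("feed_history", data_refresh_time_seconds=60)  # shorten the interval
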
    def get_dynamic_global_properties(self, use_stored_data: bool = True) -> dict[str, Any] | None:
        """This call returns the *dynamic global properties*

        :param bool use_stored_data: if True, stored data will be returned. If stored data are
            empty or old, refresh_data() is used.

        """
        if use_stored_data:
            self.refresh_data("dynamic_global_properties")
            return self.data["dynamic_global_properties"]
        if self.rpc is None:
            return None
        self.rpc.set_next_node_on_empty_reply(True)
        return self.rpc.get_dynamic_global_properties()

    def get_reserve_ratio(self) -> dict[str, Any] | None:
        """This call returns the *reserve ratio*"""
        if self.rpc is None:
            return None
        self.rpc.set_next_node_on_empty_reply(True)

        props = self.get_dynamic_global_properties()
        # conf = self.get_config()
        if props is None:
            return {
                "id": 0,
                "average_block_size": None,
                "current_reserve_ratio": None,
                "max_virtual_bandwidth": None,
            }
        try:
            reserve_ratio = {
                "id": 0,
                "average_block_size": props["average_block_size"],
                "current_reserve_ratio": props["current_reserve_ratio"],
                "max_virtual_bandwidth": props["max_virtual_bandwidth"],
            }
        except Exception:
            reserve_ratio = {
                "id": 0,
                "average_block_size": None,
                "current_reserve_ratio": None,
                "max_virtual_bandwidth": None,
            }
        return reserve_ratio

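    # Illustrative sketch (not part of the original module): reading cached dynamic
    # global properties. The key names shown are standard Hive fields and are an
    # assumption here, not guaranteed by this module.
    #
    #     props = hv.get_dynamic_global_properties()
    #     if props is not None:
    #         print(props.get("head_block_number"))
    #         print(props.get("total_vesting_shares"))
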
    def get_feed_history(self, use_stored_data: bool = True) -> dict[str, Any] | None:
        """Returns the feed_history

        :param bool use_stored_data: if True, stored data will be returned. If stored data are
            empty or old, refresh_data() is used.

        """
        if use_stored_data:
            self.refresh_data("feed_history")
            return self.data["feed_history"]
        if self.rpc is None:
            return None
        self.rpc.set_next_node_on_empty_reply(True)
        return self.rpc.get_feed_history()

    def get_reward_funds(self, use_stored_data: bool = True) -> dict[str, Any] | None:
        """Get details for a reward fund.

        :param bool use_stored_data: if True, stored data will be returned. If stored data are
            empty or old, refresh_data() is used.

        """
        if use_stored_data:
            self.refresh_data("reward_funds")
            return self.data["reward_funds"]

        if self.rpc is None:
            return None
        ret = None
        self.rpc.set_next_node_on_empty_reply(True)
        funds = self.rpc.get_reward_funds()
        if funds is not None:
            funds = funds["funds"]
        else:
            return None
        if len(funds) > 0:
            funds = funds[0]
            ret = funds
        else:
            ret = None
        return ret

    def get_current_median_history(self, use_stored_data: bool = True) -> dict[str, Any] | None:
        """Returns the current median price

        :param bool use_stored_data: if True, stored data will be returned. If stored data are
            empty or old, refresh_data() is used.
        """
        if use_stored_data:
            self.refresh_data("feed_history")
            if self.data["get_feed_history"]:
                return self.data["get_feed_history"]["current_median_history"]
            else:
                return None
        if self.rpc is None:
            return None
        ret = None
        self.rpc.set_next_node_on_empty_reply(True)
        ret = self.rpc.get_feed_history()["current_median_history"]
        return ret

    def get_hardfork_properties(self, use_stored_data: bool = True) -> dict[str, Any] | None:
        """Returns Hardfork and live_time of the hardfork

        :param bool use_stored_data: if True, stored data will be returned. If stored data are
            empty or old, refresh_data() is used.
        """
        if use_stored_data:
            self.refresh_data("hardfork_properties")
            return self.data["hardfork_properties"]
        if self.rpc is None:
            return None
        ret = None
        self.rpc.set_next_node_on_empty_reply(True)
        ret = self.rpc.get_hardfork_properties()
        return ret

    def get_network(
        self, use_stored_data: bool = True, config: dict[str, Any] | None = None
    ) -> dict[str, Any] | None:
        """Identify the network

        :param bool use_stored_data: if True, stored data will be returned. If stored data are
            empty or old, refresh_data() is used.

        :returns: Network parameters
        :rtype: dictionary
        """
        if use_stored_data:
            self.refresh_data("config")
            return self.data["network"]

        if self.rpc is None:
            return None
        try:
            return self.rpc.get_network(props=config)
        except Exception:
            return known_chains["HIVE"]

    def get_median_price(self, use_stored_data: bool = True) -> Price | None:
        """Returns the current median history price as Price"""
        median_price = self.get_current_median_history(use_stored_data=use_stored_data)
        if median_price is None:
            return None
        a = Price(
            None,
            base=Amount(median_price["base"], blockchain_instance=self),
            quote=Amount(median_price["quote"], blockchain_instance=self),
            blockchain_instance=self,
        )
        return a.as_base(self.backed_token_symbol)

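    # Illustrative sketch (not part of the original module): the median feed price
    # returned above is a Price denominated in the backed token (HBD per HIVE on
    # mainnet); the printed value is market-dependent.
    #
    #     price = hv.get_median_price()
    #     if price is not None:
    #         print(price)   # e.g. "0.300 HBD/HIVE"
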
    def get_block_interval(self, use_stored_data: bool = True) -> int:
        """Returns the block interval in seconds"""
        props = self.get_config(use_stored_data=use_stored_data)
        block_interval = 3
        if props is None:
            return block_interval
        for key in props:
            if key[-14:] == "BLOCK_INTERVAL":
                block_interval = props[key]

        return block_interval

    def get_blockchain_version(self, use_stored_data: bool = True) -> str | dict[str, Any]:
        """Returns the blockchain version"""
        props = self.get_config(use_stored_data=use_stored_data)
        blockchain_version = "0.0.0"
        if props is None:
            return blockchain_version
        for key in props:
            if key[-18:] == "BLOCKCHAIN_VERSION":
                blockchain_version = props[key]
        return blockchain_version

    def get_blockchain_name(self, use_stored_data: bool = True) -> str:
        """Returns the blockchain name"""
        props = self.get_config(use_stored_data=use_stored_data)
        blockchain_name = ""
        if props is None:
            return blockchain_name
        for key in props:
            if key[-18:] == "BLOCKCHAIN_VERSION":
                blockchain_name = key.split("_")[0].lower()
        return blockchain_name

    def get_dust_threshold(self, use_stored_data: bool = True) -> float:
        """Returns the vote dust threshold"""
        props = self.get_config(use_stored_data=use_stored_data)
        dust_threshold = 0
        if props is None:
            return dust_threshold
        for key in props:
            if key[-20:] == "VOTE_DUST_THRESHOLD":
                dust_threshold = props[key]
        return dust_threshold

    def get_resource_params(self) -> dict[str, Any]:
        """Returns the resource parameter"""
        if self.rpc is None:
            raise RuntimeError(RPC_NOT_ESTABLISHED)
        return self.rpc.get_resource_params()["resource_params"]

    def get_resource_pool(self) -> dict[str, Any]:
        """Returns the resource pool"""
        if self.rpc is None:
            raise RuntimeError(RPC_NOT_ESTABLISHED)
        return self.rpc.get_resource_pool()["resource_pool"]

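    # Illustrative sketch (not part of the original module): the helpers above scan
    # the chain config by key suffix, so they work for any prefix (HIVE_...).
    #
    #     print(hv.get_blockchain_name())      # "hive"
    #     print(hv.get_blockchain_version())   # e.g. "1.27.x"
    #     print(hv.get_block_interval())       # 3 seconds on mainnet
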
    def get_rc_cost(self, resource_count: dict[str, int]) -> int:
        """
        Compute the total Resource Credits (RC) cost for a set of resource usages.

        This queries the current resource pool, price curve parameters, and dynamic global properties to compute the RC cost for each resource type in `resource_count` and returns their sum. If the RC regeneration rate is zero, returns 0.

        Parameters:
            resource_count (dict): Mapping of resource type keys to requested usage counts. Counts are interpreted in resource-specific units and will be scaled by the resource's `resource_unit` parameter.

        Returns:
            int: Total RC cost (rounded as produced by internal cost calculation).
        """
        pools = self.get_resource_pool()
        params = self.get_resource_params()
        dyn_param = self.get_dynamic_global_properties()
        if dyn_param is None:
            return 0
        rc_regen = int(Amount(dyn_param["total_vesting_shares"], blockchain_instance=self)) / (
            HIVE_RC_REGEN_TIME / self.get_block_interval()
        )
        total_cost = 0
        if rc_regen == 0:
            return total_cost
        for resource_type in resource_count:
            curve_params = params[resource_type]["price_curve_params"]
            current_pool = int(pools[resource_type]["pool"])
            count = resource_count[resource_type]
            count *= params[resource_type]["resource_dynamics_params"]["resource_unit"]
            cost = self._compute_rc_cost(curve_params, current_pool, int(count), int(rc_regen))
            total_cost += cost
        return total_cost

    def _compute_rc_cost(
        self,
        curve_params: dict[str, Any],
        current_pool: int,
        resource_count: int,
        rc_regen: int,
    ) -> int:
        """Helper function for computing the RC costs"""
        num = int(rc_regen)
        num *= int(curve_params["coeff_a"])
        num = int(num) >> int(curve_params["shift"])
        num += 1
        num *= int(resource_count)
        denom = int(curve_params["coeff_b"])
        if int(current_pool) > 0:
            denom += int(current_pool)
        num_denom = num / denom
        return int(num_denom) + 1

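    # Illustrative sketch (not part of the original module): estimating the RC cost
    # of a hypothetical usage profile. The resource key names are assumptions based
    # on the chain's resource_params (e.g. "resource_history_bytes",
    # "resource_state_bytes"); inspect hv.get_resource_params() for the
    # authoritative names on the connected node.
    #
    #     usage = {"resource_history_bytes": 300, "resource_state_bytes": 120}
    #     print(hv.get_rc_cost(usage))   # total RC the operation would consume
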
    def _max_vote_denom(self, use_stored_data: bool = True) -> int:
        # get props
        """
        Compute the maximum vote denominator used to scale voting power consumption.

        This reads the current `vote_power_reserve_rate` from dynamic global properties
        (and may use cached data when `use_stored_data` is True) and multiplies it by
        HIVE_VOTE_REGENERATION_SECONDS to produce the denominator used in vote power
        calculations.

        Parameters:
            use_stored_data (bool): If True, allow using cached dynamic global properties
                rather than fetching fresh values from the node.

        Returns:
            int: The computed maximum vote denominator.
        """
        global_properties = self.get_dynamic_global_properties(use_stored_data=use_stored_data)
        if global_properties is None:
            return HIVE_VOTE_REGENERATION_SECONDS  # fallback value
        vote_power_reserve_rate = global_properties["vote_power_reserve_rate"]
        max_vote_denom = vote_power_reserve_rate * HIVE_VOTE_REGENERATION_SECONDS
        return max_vote_denom

    def _calc_resulting_vote(
        self, current_power: int, weight: int, power: int = HIVE_100_PERCENT
    ) -> int:
        # determine voting power used
        """
        Calculate the internal "used power" for a vote given current voting power and vote percentage.

        This converts a voter's remaining voting power (`current_power`) and a requested vote percentage (`weight`), both expressed on the internal scale where HIVE_100_PERCENT represents 100%, into the integer unit the chain uses for vote consumption. The computation uses the absolute value of `weight`, scales by a 24-hour factor (60*60*24), then normalizes by the chain's maximum vote denominator (retrieved via _max_vote_denom) with upward rounding.

        Parameters:
            current_power (int): Current voting power expressed in the node's internal units (HIVE_100_PERCENT == full power).
            weight (int): Vote weight in the node's internal units.
            power (int): Power parameter (defaults to HIVE_100_PERCENT).

        Returns:
            int: The computed used voting power in the chain's internal units.
        """
        used_power = int((current_power * abs(weight)) / HIVE_100_PERCENT * (60 * 60 * 24))
        max_vote_denom = self._max_vote_denom(use_stored_data=True)
        used_power = int((used_power + max_vote_denom - 1) / max_vote_denom)
        return used_power

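    # Worked example (illustrative, not part of the original module), assuming
    # vote_power_reserve_rate == 10 and HIVE_VOTE_REGENERATION_SECONDS == 432000
    # (the usual Hive mainnet values):
    #
    #     used_power = int((10000 * abs(10000)) / 10000 * 86400)       # = 864_000_000
    #     max_vote_denom = 10 * 432000                                 # = 4_320_000
    #     used_power = int((864_000_000 + 4_320_000 - 1) / 4_320_000)  # = 200
    #
    # i.e. a 100% vote at full power consumes 200 internal units (2% of 10000).
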
    def _calc_vote_claim(self, effective_vote_rshares: int, post_rshares: int) -> int | float:
        post_rshares_normalized = post_rshares + CURVE_CONSTANT
        post_rshares_after_vote_normalized = post_rshares + effective_vote_rshares + CURVE_CONSTANT
        post_rshares_curve = (
            post_rshares_normalized * post_rshares_normalized - SQUARED_CURVE_CONSTANT
        ) / (post_rshares + CURVE_CONSTANT_X4)
        post_rshares_curve_after_vote = (
            post_rshares_after_vote_normalized * post_rshares_after_vote_normalized
            - SQUARED_CURVE_CONSTANT
        ) / (post_rshares + effective_vote_rshares + CURVE_CONSTANT_X4)
        vote_claim = post_rshares_curve_after_vote - post_rshares_curve
        return vote_claim

    def _calc_revert_vote_claim(self, vote_claim: int, post_rshares: int) -> int | float:
        post_rshares_normalized = post_rshares + CURVE_CONSTANT
        post_rshares_curve = (
            post_rshares_normalized * post_rshares_normalized - SQUARED_CURVE_CONSTANT
        ) / (post_rshares + CURVE_CONSTANT_X4)
        post_rshares_curve_after_vote = vote_claim + post_rshares_curve

        a = 1
        b = -post_rshares_curve_after_vote + 2 * post_rshares_normalized
        c = (
            post_rshares_normalized * post_rshares_normalized - SQUARED_CURVE_CONSTANT
        ) - post_rshares_curve_after_vote * (post_rshares + CURVE_CONSTANT_X4)
        # (effective_vote_rshares * effective_vote_rshares) + effective_vote_rshares * (-post_rshares_curve_after_vote + 2 * post_rshares_normalized) + ((post_rshares_normalized * post_rshares_normalized - SQUARED_CURVE_CONSTANT) - post_rshares_curve_after_vote * (post_rshares + CURVE_CONSTANT_X4)) = 0

        x1 = (-b + math.sqrt(b * b - 4 * a * c)) / (2 * a)
        x2 = (-b - math.sqrt(b * b - 4 * a * c)) / (2 * a)
        if x1 >= 0:
            return x1
        else:
            return x2

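    # Illustrative sketch (not part of the original module): _calc_revert_vote_claim
    # solves the quadratic commented above, so it should invert _calc_vote_claim for
    # a given post_rshares (up to floating-point error). An offline instance is
    # enough since only module constants are used; the numbers are placeholders.
    #
    #     hv = Hive(offline=True)
    #     claim = hv._calc_vote_claim(10_000_000, post_rshares=50_000_000)
    #     back = hv._calc_revert_vote_claim(claim, post_rshares=50_000_000)
    #     print(back)   # expected to be close to 10_000_000
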
    def vests_to_rshares(
        self,
        vests: float | int | Amount,
        voting_power: int = HIVE_100_PERCENT,
        vote_pct: int = HIVE_100_PERCENT,
        subtract_dust_threshold: bool = True,
        use_stored_data: bool = True,
        post_rshares: int = 0,
    ) -> int | float:
        """
        Convert vesting shares to reward r-shares used for voting.

        Calculates the signed r-shares produced by a vote from a given amount of vesting shares, taking into account current voting power and vote percentage. Optionally subtracts the chain's dust threshold so small votes become zero.

        Parameters:
            vests (float|int): Vesting shares (in VESTS units) to convert.
            voting_power (int, optional): Voter's current voting power, where 100% == 10000. Defaults to HIVE_100_PERCENT.
            vote_pct (int, optional): Intended vote strength, where 100% == 10000. Can be negative for downvotes. Defaults to HIVE_100_PERCENT.
            subtract_dust_threshold (bool, optional): If True, subtract the chain's dust threshold from the absolute r-shares and return 0 when the result is at-or-below the threshold. Defaults to True.
            use_stored_data (bool, optional): If True, prefer cached chain parameters when computing vote cost; otherwise fetch fresh values from the node. Defaults to True.

        Returns:
            int: Signed r-shares corresponding to the provided vesting shares and vote parameters. Returns 0 if the computed r-shares are at-or-below the dust threshold when subtraction is enabled.
        """
        if isinstance(vests, Amount):
            vests = float(vests)
        used_power = self._calc_resulting_vote(
            current_power=voting_power, weight=vote_pct, power=HIVE_100_PERCENT
        )
        # calculate vote rshares
        rshares = int(math.copysign(vests * 1e6 * used_power / HIVE_100_PERCENT, vote_pct))
        if subtract_dust_threshold:
            if abs(rshares) <= self.get_dust_threshold(use_stored_data=use_stored_data):
                return 0
            rshares -= math.copysign(
                self.get_dust_threshold(use_stored_data=use_stored_data), vote_pct
            )
        # Apply curve adjustment relative to existing post rshares
        rshares = self._calc_vote_claim(int(rshares), post_rshares)
        return rshares

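    # Illustrative sketch (not part of the original module): converting an account's
    # vesting shares into the r-shares a vote would add. The numeric values below
    # are placeholders.
    #
    #     rshares = hv.vests_to_rshares(
    #         1_000_000,          # VESTS
    #         voting_power=9800,  # 98.00% remaining power
    #         vote_pct=5000,      # 50.00% upvote
    #     )
    #     print(rshares)
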
    def token_power_to_vests(
        self,
        token_power: float,
        timestamp: datetime | None = None,
        use_stored_data: bool = True,
    ) -> float:
        """Converts TokenPower to vests

        :param float token_power: Token power to convert
        :param datetime timestamp: (Optional) Can be used to calculate
            the conversion rate from the past
        """
        raise Exception("not implemented")

    def vests_to_token_power(
        self,
        vests: float | Amount,
        timestamp: int | None = None,
        use_stored_data: bool = True,
    ) -> float:
        """Converts vests to TokenPower

        :param amount.Amount vests/float vests: Vests to convert
        :param int timestamp: (Optional) Can be used to calculate
            the conversion rate from the past

        """
        raise Exception("not implemented")

    def get_token_per_mvest(
        self, time_stamp: int | datetime | None = None, use_stored_data: bool = True
    ) -> float:
        """Returns the MVEST to TOKEN ratio

        :param int time_stamp: (optional) if set, return an estimated
            TOKEN per MVEST ratio for the given time stamp. If unset the
            current ratio is returned (default). (can also be a datetime object)
        """
        raise Exception("not implemented")

    def rshares_to_token_backed_dollar(
        self,
        rshares: int,
        not_broadcasted_vote: bool = False,
        use_stored_data: bool = True,
    ) -> float:
        """Calculates the current HBD value of a vote"""
        raise Exception("not implemented")

    def token_power_to_token_backed_dollar(
        self,
        token_power: float,
        post_rshares: int = 0,
        voting_power: int = HIVE_100_PERCENT,
        vote_pct: int = HIVE_100_PERCENT,
        not_broadcasted_vote: bool = True,
        use_stored_data: bool = True,
    ) -> float:
        """
        Estimate the token-backed-dollar (HBD-like) value that a vote from the given token power would yield.

        Calculates the expected payout (in the blockchain's backed token units) that a vote of `vote_pct` from an account
        with `voting_power` and `token_power` would contribute to a post with `post_rshares`. The estimate accounts for
        the vote rshares mechanics and the reduction of the reward pool when a not-yet-broadcast vote is included.

        Parameters:
            token_power (float): Voter's token power (in vest/token-equivalent units used by the chain).
            post_rshares (int, optional): Current rshares of the post being voted on. Defaults to 0.
            voting_power (int, optional): Voter's current voting power where 100% == HIVE_100_PERCENT (default full power).
            vote_pct (int, optional): Vote percentage where 100% == HIVE_100_PERCENT (default full vote).
            not_broadcasted_vote (bool, optional): If True, treat the vote as not yet broadcast (reduces available reward pool accordingly).
            use_stored_data (bool, optional): If True, prefer cached chain parameters; otherwise fetch fresh values.

        Returns:
            float: Estimated payout denominated in the backed token (e.g., HBD).

        Raises:
            Exception: Not implemented (function is a placeholder).
        """
        raise Exception("not implemented")

    def get_chain_properties(self, use_stored_data: bool = True) -> dict[str, Any]:
        """
        Return the witness-elected chain properties (median_props) used by the network.

        When cached data is allowed (use_stored_data=True) this reads from the instance cache
        (populated by refresh_data). Otherwise it fetches the latest witness schedule and
        returns its `median_props` object.

        Parameters:
            use_stored_data (bool): If True, return cached properties when available; if False,
                force fetching the current witness schedule.

        Returns:
            dict: The `median_props` mapping, e.g.:
                {
                    'account_creation_fee': '30.000 HIVE',
                    'maximum_block_size': 65536,
                    'hbd_interest_rate': 250
                }
        """
        if use_stored_data:
            self.refresh_data("witness_schedule")
            witness_schedule = self.data.get("witness_schedule")
            if witness_schedule:
                return witness_schedule["median_props"]
            return {}
        else:
            witness_schedule = self.get_witness_schedule(use_stored_data)
            return witness_schedule["median_props"] if witness_schedule else {}

    def get_witness_schedule(self, use_stored_data: bool = True) -> dict[str, Any] | None:
        """Return witness elected chain properties"""
        if use_stored_data:
            self.refresh_data("witness_schedule")
            return self.data["witness_schedule"]

        if self.rpc is None:
            return None
        self.rpc.set_next_node_on_empty_reply(True)
        return self.rpc.get_witness_schedule()

    def get_config(self, use_stored_data: bool = True) -> dict[str, Any] | None:
        """Returns internal chain configuration.

        :param bool use_stored_data: If True, the cached value is returned
        """
        if use_stored_data:
            self.refresh_data("config")
            config = self.data["config"]
        else:
            if self.rpc is None:
                return None
            self.rpc.set_next_node_on_empty_reply(True)
            config = self.rpc.get_config()
        return config

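    # Illustrative sketch (not part of the original module): reading the witness-elected
    # properties and the low-level chain config retrieved above. The printed fee is an
    # example; the actual value is set by the witnesses.
    #
    #     props = hv.get_chain_properties()
    #     print(props.get("account_creation_fee"))   # e.g. "3.000 HIVE"
    #     config = hv.get_config()
    #     if config is not None:
    #         print("HIVE_CHAIN_ID" in config)       # True on Hive mainnet
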
    @property
    def chain_params(self) -> dict[str, Any]:
        if self.offline or self.rpc is None:
            return known_chains["HIVE"]
        else:
            network = self.get_network()
            return network if network is not None else known_chains["HIVE"]

    @property
    def hardfork(self) -> int:
        if self.offline or self.rpc is None:
            versions = known_chains["HIVE"]["min_version"]
        else:
            hf_prop = self.get_hardfork_properties()
            if hf_prop and "current_hardfork_version" in hf_prop:
                versions = hf_prop["current_hardfork_version"]
            else:
                versions = self.get_blockchain_version()
        # Ensure versions is a string before splitting
        if isinstance(versions, dict):
            versions = versions.get("HIVE_BLOCKCHAIN_VERSION", "0.0.0")
        return int(str(versions).split(".")[1])

    @property
    def prefix(self) -> str:
        return self.chain_params["prefix"]

    @property
    def is_hive(self) -> bool:
        """
        Return True if the connected chain appears to be Hive.

        Checks the cached chain configuration and returns True when the key "HIVE_CHAIN_ID"
        is present; returns False if configuration is unavailable or the key is absent.
        """
        config = self.get_config(use_stored_data=True)
        if config is None:
            return False
        return "HIVE_CHAIN_ID" in config

    @property
    def is_steem(self) -> bool:
        """Deprecated compatibility flag; always False in Hive-only nectar."""
        return False

    def set_default_account(self, account: str) -> None:
        """
        Set the instance default account.

        If given an account name or an Account object, validate/resolve it (an Account is
        constructed with this blockchain instance) and store the account identifier in
        the instance configuration under "default_account". This makes the account the
        implicit default for subsequent operations that omit an explicit account.

        Parameters:
            account (str | Account): Account name or Account object to set as default.

        Notes:
            The Account constructor is invoked for validation; errors from account
            resolution/lookup may propagate.
        """
        Account(account, blockchain_instance=self)
        self.config["default_account"] = account

    def switch_blockchain(self, blockchain: str, update_nodes: bool = False) -> None:
        """
        Switch the instance to the specified blockchain (Hive only).

        If the requested blockchain is already the configured default and update_nodes is False, this is a no-op.
        When update_nodes is True, the node list is refreshed via NodeList.update_nodes() and the default nodes
        are replaced with the Hive node list. The instance's config["default_chain"] is updated and, if the
        instance is not offline, a reconnect is attempted.

        Parameters:
            blockchain (str): Target blockchain; must be "hive".
            update_nodes (bool): If True, refresh and replace the known node list before switching.
        """
        assert blockchain in ["hive"]
        if blockchain == self.config["default_chain"] and not update_nodes:
            return
        from nectar.nodelist import NodeList

        nodelist = NodeList()
        if update_nodes:
            nodelist.update_nodes()
            if blockchain == "hive":
                self.set_default_nodes(nodelist.get_hive_nodes())
        self.config["default_chain"] = blockchain
        if not self.offline:
            self.connect(node="")

    def set_password_storage(self, password_storage: str) -> None:
        """Set the password storage mode.

        When set to "no", the password has to be provided each time.
        When set to "environment" the password is taken from the
        UNLOCK variable

        When set to "keyring" the password is taken from the
        python keyring module. A wallet password can be stored with
        python -m keyring set nectar wallet password

        :param str password_storage: can be "no",
            "keyring" or "environment"

        """
        self.config["password_storage"] = password_storage

    def set_default_nodes(self, nodes: list[str] | str) -> None:
        """Set the default nodes to be used"""
        if bool(nodes):
            if isinstance(nodes, list):
                nodes = str(nodes)
            self.config["node"] = nodes
        else:
            self.config.delete("node")

    def get_default_nodes(self) -> list[str]:
        """Returns the default nodes"""
        if "node" in self.config:
            nodes = self.config["node"]
        elif "nodes" in self.config:
            nodes = self.config["nodes"]
        elif "node" in self.config.defaults:
            nodes = self.config["node"]
        elif "default_nodes" in self.config and bool(self.config["default_nodes"]):
            nodes = self.config["default_nodes"]
        else:
            nodes = []
        if isinstance(nodes, str) and nodes[0] == "[" and nodes[-1] == "]":
            nodes = ast.literal_eval(nodes)
        return nodes

    def move_current_node_to_front(self) -> None:
        """Rotates the default node list until the first entry
        equals the current working node url
        """
        node = self.get_default_nodes()
        if len(node) < 2:
            return
        if not isinstance(node, list):
            return
        offline = self.offline
        while not offline and self.rpc is not None and node[0] != self.rpc.url and len(node) > 1:
            node = node[1:] + [node[0]]
        self.set_default_nodes(node)

    def set_default_vote_weight(self, vote_weight: int) -> None:
        """Set the default vote weight to be used"""
        self.config["default_vote_weight"] = vote_weight

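    # Illustrative sketch (not part of the original module): managing the stored node
    # list consumed by connect(). The URLs are assumptions (any public Hive API
    # endpoints work).
    #
    #     hv.set_default_nodes(["https://api.hive.blog", "https://api.deathwing.me"])
    #     print(hv.get_default_nodes())
    #     hv.move_current_node_to_front()   # rotate so the active node is tried first
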
|
|
1119
|
+
def finalizeOp(
|
|
1120
|
+
self, ops: Any, account: Account | str, permission: str, **kwargs
|
|
1121
|
+
) -> dict[str, Any]:
|
|
1122
|
+
"""This method obtains the required private keys if present in
|
|
1123
|
+
the wallet, finalizes the transaction, signs it and
|
|
1124
|
+
broadacasts it
|
|
1125
|
+
|
|
1126
|
+
:param ops: The operation (or list of operations) to
|
|
1127
|
+
broadcast
|
|
1128
|
+
:type ops: list, GrapheneObject
|
|
1129
|
+
:param Account account: The account that authorizes the
|
|
1130
|
+
operation
|
|
1131
|
+
:param string permission: The required permission for
|
|
1132
|
+
signing (active, owner, posting)
|
|
1133
|
+
:param TransactionBuilder append_to: This allows to provide an instance of
|
|
1134
|
+
TransactionBuilder (see :func:`BlockChainInstance.new_tx()`) to specify
|
|
1135
|
+
where to put a specific operation.
|
|
1136
|
+
|
|
1137
|
+
.. note:: ``append_to`` is exposed to every method used in the
|
|
1138
|
+
BlockChainInstance class
|
|
1139
|
+
|
|
1140
|
+
.. note:: If ``ops`` is a list of operation, they all need to be
|
|
1141
|
+
signable by the same key! Thus, you cannot combine ops
|
|
1142
|
+
that require active permission with ops that require
|
|
1143
|
+
posting permission. Neither can you use different
|
|
1144
|
+
accounts for different operations!
|
|
1145
|
+
|
|
1146
|
+
.. note:: This uses :func:`BlockChainInstance.txbuffer` as instance of
|
|
1147
|
+
:class:`nectar.transactionbuilder.TransactionBuilder`.
|
|
1148
|
+
You may want to use your own txbuffer
|
|
1149
|
+
|
|
1150
|
+
.. note:: when doing sign + broadcast, the trx_id is added to the returned dict
|
|
1151
|
+
|
|
1152
|
+
"""
|
|
1153
|
+
if self.offline:
|
|
1154
|
+
return {}
|
|
1155
|
+
if "append_to" in kwargs and kwargs["append_to"]:
|
|
1156
|
+
# Append to the append_to and return
|
|
1157
|
+
append_to = kwargs["append_to"]
|
|
1158
|
+
parent = append_to.get_parent()
|
|
1159
|
+
if not isinstance(append_to, (TransactionBuilder)):
|
|
1160
|
+
raise AssertionError()
|
|
1161
|
+
append_to.appendOps(ops)
|
|
1162
|
+
# Add the signer to the buffer so we sign the tx properly
|
|
1163
|
+
parent.appendSigner(account, permission)
|
|
1164
|
+
# Return here because append_to was used; it does NOT broadcast or sign
|
|
1165
|
+
return append_to.get_parent()
|
|
1166
|
+
# Go forward to see what the other options do ...
|
|
1167
|
+
else:
|
|
1168
|
+
# Append to the default buffer
|
|
1169
|
+
self.txbuffer.appendOps(ops)
|
|
1170
|
+
|
|
1171
|
+
# Add signing information, signer, sign and optionally broadcast
|
|
1172
|
+
if self.unsigned:
|
|
1173
|
+
# In case we don't want to sign anything
|
|
1174
|
+
self.txbuffer.addSigningInformation(account, permission)
|
|
1175
|
+
return self.txbuffer
|
|
1176
|
+
elif self.bundle:
|
|
1177
|
+
# In case we want to add more ops to the tx (bundle)
|
|
1178
|
+
self.txbuffer.appendSigner(account, permission)
|
|
1179
|
+
return self.txbuffer.json()
|
|
1180
|
+
else:
|
|
1181
|
+
# default behavior: sign + broadcast
|
|
1182
|
+
self.txbuffer.appendSigner(account, permission)
|
|
1183
|
+
ret_sign = self.txbuffer.sign()
|
|
1184
|
+
ret = self.txbuffer.broadcast()
|
|
1185
|
+
if ret_sign is not None:
|
|
1186
|
+
ret["trx_id"] = ret_sign.id
|
|
1187
|
+
return ret
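A rough sketch of the ``append_to`` mechanism described above, bundling two operations into one transaction; the account name, permlinks, and the `nobroadcast` constructor flag are placeholders and assumptions:

    from nectar.hive import Hive

    hv = Hive(nobroadcast=True)   # assumed flag: build and sign but never send
    tx = hv.new_tx()              # dedicated TransactionBuilder

    # Every helper that ends in finalizeOp accepts append_to, so both votes
    # are collected into the same buffer instead of being broadcast separately.
    hv.vote(100, "@some-author/first-permlink", account="some-voter", append_to=tx)
    hv.vote(100, "@some-author/second-permlink", account="some-voter", append_to=tx)

    tx.appendMissingSignatures()
    print(tx.json())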
|
|
1188
|
+
|
|
1189
|
+
def sign(
|
|
1190
|
+
self,
|
|
1191
|
+
tx: dict[str, Any] | None = None,
|
|
1192
|
+
wifs: list[str] | str | None = None,
|
|
1193
|
+
reconstruct_tx: bool = True,
|
|
1194
|
+
) -> dict[str, Any]:
|
|
1195
|
+
"""
|
|
1196
|
+
Sign a transaction using the provided WIFs, or keys from the wallet for any missing signatures, and return the signed transaction.
|
|
1197
|
+
|
|
1198
|
+
If tx is provided, it is wrapped in a TransactionBuilder; otherwise the instance's current txbuffer is used. Provided wifs (single string or list) are appended before missing required signatures are added. If reconstruct_tx is False and the transaction already contains signatures, it will not be reconstructed.
|
|
1199
|
+
|
|
1200
|
+
Parameters:
|
|
1201
|
+
tx (dict, optional): A transaction object to sign. If omitted, the active txbuffer is used.
|
|
1202
|
+
wifs (str | list[str], optional): One or more WIF private keys to use for signing. If not provided, keys from the wallet for any missing signatures are used.
|
|
1203
|
+
reconstruct_tx (bool, optional): If False, do not reconstruct an already-built transaction; existing signatures are preserved. Defaults to True.
|
|
1204
|
+
|
|
1205
|
+
Returns:
|
|
1206
|
+
dict: The signed transaction JSON with an added "trx_id" field containing the transaction id.
|
|
1207
|
+
"""
|
|
1208
|
+
if wifs is None:
|
|
1209
|
+
wifs = []
|
|
1210
|
+
if tx:
|
|
1211
|
+
txbuffer = TransactionBuilder(tx=tx, blockchain_instance=self)
|
|
1212
|
+
else:
|
|
1213
|
+
txbuffer = self.txbuffer
|
|
1214
|
+
txbuffer.appendWif(wifs)
|
|
1215
|
+
txbuffer.appendMissingSignatures()
|
|
1216
|
+
ret_sign = txbuffer.sign(reconstruct_tx=reconstruct_tx)
|
|
1217
|
+
ret = txbuffer.json()
|
|
1218
|
+
ret["trx_id"] = ret_sign.id
|
|
1219
|
+
return ret
|
|
1220
|
+
|
|
1221
|
+
def broadcast(self, tx: dict[str, Any] | None = None, trx_id: bool = True) -> dict[str, Any]:
|
|
1222
|
+
"""Broadcast a transaction to the Hive network
|
|
1223
|
+
|
|
1224
|
+
:param dict tx: Signed transaction to broadcast
|
|
1225
|
+
:param bool trx_id: when True, the trx_id will be included into the return dict.
|
|
1226
|
+
|
|
1227
|
+
"""
|
|
1228
|
+
if tx:
|
|
1229
|
+
# If tx is provided, we broadcast the tx
|
|
1230
|
+
return TransactionBuilder(tx=tx, blockchain_instance=self).broadcast(trx_id=trx_id)
|
|
1231
|
+
else:
|
|
1232
|
+
return self.txbuffer.broadcast()
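A minimal sketch of the sign/broadcast pair above. The WIF string is a placeholder, the constructor flag is an assumption, and operations are assumed to have been appended to the default buffer by one of the helper methods:

    from nectar.hive import Hive

    hv = Hive(nobroadcast=True)   # assumed flag: nothing is actually sent

    # Sign whatever sits in the default tx buffer, adding one explicit key.
    signed_tx = hv.sign(wifs=["<posting-wif-placeholder>"])
    print(signed_tx["trx_id"])

    # Broadcast a previously signed transaction dict explicitly.
    hv.broadcast(tx=signed_tx)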
|
|
1233
|
+
|
|
1234
|
+
def info(self, use_stored_data: bool = True) -> dict[str, Any] | None:
|
|
1235
|
+
"""Returns the global properties"""
|
|
1236
|
+
return self.get_dynamic_global_properties(use_stored_data=use_stored_data)
|
|
1237
|
+
|
|
1238
|
+
# -------------------------------------------------------------------------
|
|
1239
|
+
# Wallet stuff
|
|
1240
|
+
# -------------------------------------------------------------------------
|
|
1241
|
+
def newWallet(self, pwd: str) -> None:
|
|
1242
|
+
"""Create a new wallet. This method is basically only calls
|
|
1243
|
+
:func:`nectar.wallet.Wallet.create`.
|
|
1244
|
+
|
|
1245
|
+
:param str pwd: Password to use for the new wallet
|
|
1246
|
+
|
|
1247
|
+
:raises WalletExists: if there is already a
|
|
1248
|
+
wallet created
|
|
1249
|
+
|
|
1250
|
+
"""
|
|
1251
|
+
return self.wallet.create(pwd)
|
|
1252
|
+
|
|
1253
|
+
def unlock(self, *args, **kwargs) -> bool | None:
|
|
1254
|
+
"""Unlock the internal wallet"""
|
|
1255
|
+
return self.wallet.unlock(*args, **kwargs)
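The wallet helpers above are typically used once to create and then unlock the encrypted key store; a small sketch follows (the passphrase is a placeholder and `wallet.unlocked()` is an assumed helper of the Wallet class):

    from nectar.hive import Hive

    hv = Hive()
    hv.newWallet("<wallet-passphrase>")   # raises WalletExists if one is present
    hv.unlock("<wallet-passphrase>")
    print(hv.wallet.unlocked())           # assumed helper on nectar.wallet.Wallet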
|
|
1256
|
+
|
|
1257
|
+
# -------------------------------------------------------------------------
|
|
1258
|
+
# Transaction Buffers
|
|
1259
|
+
# -------------------------------------------------------------------------
|
|
1260
|
+
@property
|
|
1261
|
+
def txbuffer(self) -> TransactionBuilder:
|
|
1262
|
+
"""Returns the currently active tx buffer"""
|
|
1263
|
+
return self.tx()
|
|
1264
|
+
|
|
1265
|
+
def tx(self) -> TransactionBuilder:
|
|
1266
|
+
"""Returns the default transaction buffer"""
|
|
1267
|
+
return self._txbuffers[0]
|
|
1268
|
+
|
|
1269
|
+
def new_tx(self, *args, **kwargs) -> TransactionBuilder:
|
|
1270
|
+
"""Let's obtain a new txbuffer
|
|
1271
|
+
|
|
1272
|
+
:returns: the new transaction buffer
|
|
1273
|
+
:rtype: TransactionBuilder
|
|
1274
|
+
"""
|
|
1275
|
+
# Remove blockchain_instance from kwargs if it exists to avoid duplicate
|
|
1276
|
+
kwargs.pop("blockchain_instance", None)
|
|
1277
|
+
|
|
1278
|
+
# Extract tx parameter if present (first positional argument)
|
|
1279
|
+
tx = args[0] if args else None
|
|
1280
|
+
|
|
1281
|
+
# Pass self as blockchain_instance to avoid recursion
|
|
1282
|
+
builder = TransactionBuilder(tx, blockchain_instance=self, **kwargs)
|
|
1283
|
+
self._txbuffers.append(builder)
|
|
1284
|
+
return builder
|
|
1285
|
+
|
|
1286
|
+
def clear(self) -> None:
|
|
1287
|
+
self._txbuffers = []
|
|
1288
|
+
# Base/Default proposal/tx buffers
|
|
1289
|
+
self.new_tx()
|
|
1290
|
+
# self.new_proposal()
|
|
1291
|
+
|
|
1292
|
+
# -------------------------------------------------------------------------
|
|
1293
|
+
# Account related calls
|
|
1294
|
+
# -------------------------------------------------------------------------
|
|
1295
|
+
def claim_account(
|
|
1296
|
+
self, creator: str | Account | None = None, fee: str | None = None, **kwargs
|
|
1297
|
+
) -> dict[str, Any]:
|
|
1298
|
+
"""
|
|
1299
|
+
Claim a subsidized account slot or pay the account-creation fee.
|
|
1300
|
+
|
|
1301
|
+
When `fee` is "0 <TOKEN>" (default), the claim consumes an account slot paid from RC (resource credits)
|
|
1302
|
+
allowing a later call to `create_claimed_account` to create the account. Supplying a nonzero `fee`
|
|
1303
|
+
will pay the registration fee in the chain token (e.g., HIVE).
|
|
1304
|
+
|
|
1305
|
+
Parameters:
|
|
1306
|
+
creator (str): Account that will pay or consume the claim (defaults to configured `default_account`).
|
|
1307
|
+
fee (str, optional): Fee as a string with asset symbol (e.g., "0 HIVE" or "3.000 HIVE"). If omitted, defaults to "0 <token_symbol>".
|
|
1308
|
+
|
|
1309
|
+
Returns:
|
|
1310
|
+
The result of finalizeOp for the submitted Claim_account operation (signed/broadcast transaction or unsigned/buffered result, depending on instance configuration).
|
|
1311
|
+
|
|
1312
|
+
Raises:
|
|
1313
|
+
ValueError: If no `creator` is provided and no `default_account` is configured.
|
|
1314
|
+
"""
|
|
1315
|
+
fee = fee if fee is not None else "0 %s" % (self.token_symbol)
|
|
1316
|
+
if not creator and self.config["default_account"]:
|
|
1317
|
+
creator = self.config["default_account"]
|
|
1318
|
+
if not creator:
|
|
1319
|
+
raise ValueError(
|
|
1320
|
+
"Not creator account given. Define it with "
|
|
1321
|
+
+ "creator=x, or set the default_account using hive-nectar"
|
|
1322
|
+
)
|
|
1323
|
+
assert creator is not None # Type checker: creator is guaranteed not None here
|
|
1324
|
+
creator = Account(creator, blockchain_instance=self) # type: ignore[assignment]
|
|
1325
|
+
op = {
|
|
1326
|
+
"fee": Amount(fee, blockchain_instance=self, json_str=True),
|
|
1327
|
+
"creator": creator["name"],
|
|
1328
|
+
"prefix": self.prefix,
|
|
1329
|
+
"json_str": True,
|
|
1330
|
+
}
|
|
1331
|
+
op = operations.Claim_account(**op)
|
|
1332
|
+
return self.finalizeOp(op, creator, "active", **kwargs)
|
|
1333
|
+
|
|
1334
|
+
def create_claimed_account(
|
|
1335
|
+
self,
|
|
1336
|
+
account_name: str,
|
|
1337
|
+
creator: str | Account | None = None,
|
|
1338
|
+
owner_key: str | None = None,
|
|
1339
|
+
active_key: str | None = None,
|
|
1340
|
+
memo_key: str | None = None,
|
|
1341
|
+
posting_key: str | None = None,
|
|
1342
|
+
password: str | None = None,
|
|
1343
|
+
additional_owner_keys: list[str] | None = None,
|
|
1344
|
+
additional_active_keys: list[str] | None = None,
|
|
1345
|
+
additional_posting_keys: list[str] | None = None,
|
|
1346
|
+
additional_owner_accounts: list[str] | None = None,
|
|
1347
|
+
additional_active_accounts: list[str] | None = None,
|
|
1348
|
+
additional_posting_accounts: list[str] | None = None,
|
|
1349
|
+
storekeys: bool = True,
|
|
1350
|
+
store_owner_key: bool = False,
|
|
1351
|
+
json_meta: dict[str, Any] | None = None,
|
|
1352
|
+
combine_with_claim_account: bool = False,
|
|
1353
|
+
fee: str | None = None,
|
|
1354
|
+
**kwargs,
|
|
1355
|
+
) -> dict[str, Any]:
|
|
1356
|
+
"""Create new claimed account on Hive
|
|
1357
|
+
|
|
1358
|
+
The brainkey/password can be used to recover all generated keys
|
|
1359
|
+
(see :class:`nectargraphenebase.account` for more details).
|
|
1360
|
+
|
|
1361
|
+
By default, this call will use ``default_account`` to
|
|
1362
|
+
register a new name ``account_name`` with all keys being
|
|
1363
|
+
derived from a new brain key that will be returned. The
|
|
1364
|
+
corresponding keys will automatically be installed in the
|
|
1365
|
+
wallet.
|
|
1366
|
+
|
|
1367
|
+
.. warning:: Don't call this method unless you know what
|
|
1368
|
+
you are doing! Be sure to understand what this
|
|
1369
|
+
method does and where to find the private keys
|
|
1370
|
+
for your account.
|
|
1371
|
+
|
|
1372
|
+
.. note:: Please note that this imports private keys
|
|
1373
|
+
(if password is present) into the wallet by
|
|
1374
|
+
default when nobroadcast is set to False.
|
|
1375
|
+
However, it **does not import the owner
|
|
1376
|
+
key** for security reasons by default.
|
|
1377
|
+
If you set store_owner_key to True, the
|
|
1378
|
+
owner key is stored.
|
|
1379
|
+
Do NOT expect to be able to recover it from
|
|
1380
|
+
the wallet if you lose your password!
|
|
1381
|
+
|
|
1382
|
+
.. note:: Account creations cost a fee that is defined by
|
|
1383
|
+
the network. If you create an account, you will
|
|
1384
|
+
need to pay for that fee!
|
|
1385
|
+
|
|
1386
|
+
:param str account_name: (**required**) new account name
|
|
1387
|
+
:param str json_meta: Optional meta data for the account
|
|
1388
|
+
:param str owner_key: Main owner key
|
|
1389
|
+
:param str active_key: Main active key
|
|
1390
|
+
:param str posting_key: Main posting key
|
|
1391
|
+
:param str memo_key: Main memo_key
|
|
1392
|
+
:param str password: Alternatively to providing keys, one
|
|
1393
|
+
can provide a password from which the
|
|
1394
|
+
keys will be derived
|
|
1395
|
+
:param array additional_owner_keys: Additional owner public keys
|
|
1396
|
+
:param array additional_active_keys: Additional active public keys
|
|
1397
|
+
:param array additional_posting_keys: Additional posting public keys
|
|
1398
|
+
:param array additional_owner_accounts: Additional owner account
|
|
1399
|
+
names
|
|
1400
|
+
:param array additional_active_accounts: Additional active account
|
|
1401
|
+
names
|
|
1402
|
+
:param bool storekeys: Store new keys in the wallet (default:
|
|
1403
|
+
``True``)
|
|
1404
|
+
:param bool combine_with_claim_account: When set to True, a
|
|
1405
|
+
claim_account operation is additionally broadcasted
|
|
1406
|
+
:param str fee: When combine_with_claim_account is set to True,
|
|
1407
|
+
this parameter is used for the claim_account operation
|
|
1408
|
+
|
|
1409
|
+
:param str creator: which account should pay the registration fee
|
|
1410
|
+
(defaults to ``default_account``)
|
|
1411
|
+
:raises AccountExistsException: if the account already exists on
|
|
1412
|
+
the blockchain
|
|
1413
|
+
|
|
1414
|
+
"""
|
|
1415
|
+
fee = fee if fee is not None else "0 %s" % (self.token_symbol)
|
|
1416
|
+
if not creator and self.config["default_account"]:
|
|
1417
|
+
creator = self.config["default_account"]
|
|
1418
|
+
if not creator:
|
|
1419
|
+
raise ValueError(
|
|
1420
|
+
"Not creator account given. Define it with "
|
|
1421
|
+
+ "creator=x, or set the default_account using hive-nectar"
|
|
1422
|
+
)
|
|
1423
|
+
if password and (owner_key or active_key or memo_key):
|
|
1424
|
+
raise ValueError("You cannot use 'password' AND provide keys!")
|
|
1425
|
+
|
|
1426
|
+
try:
|
|
1427
|
+
Account(account_name, blockchain_instance=self)
|
|
1428
|
+
raise AccountExistsException
|
|
1429
|
+
except AccountDoesNotExistsException:
|
|
1430
|
+
pass
|
|
1431
|
+
|
|
1432
|
+
creator = Account(creator, blockchain_instance=self) # type: ignore[assignment]
|
|
1433
|
+
|
|
1434
|
+
" Generate new keys from password"
|
|
1435
|
+
from nectargraphenebase.account import PasswordKey
|
|
1436
|
+
|
|
1437
|
+
if password:
|
|
1438
|
+
active_key_obj = PasswordKey(account_name, password, role="active", prefix=self.prefix)
|
|
1439
|
+
owner_key_obj = PasswordKey(account_name, password, role="owner", prefix=self.prefix)
|
|
1440
|
+
posting_key_obj = PasswordKey(
|
|
1441
|
+
account_name, password, role="posting", prefix=self.prefix
|
|
1442
|
+
)
|
|
1443
|
+
memo_key_obj = PasswordKey(account_name, password, role="memo", prefix=self.prefix)
|
|
1444
|
+
active_pubkey = active_key_obj.get_public_key()
|
|
1445
|
+
owner_pubkey = owner_key_obj.get_public_key()
|
|
1446
|
+
posting_pubkey = posting_key_obj.get_public_key()
|
|
1447
|
+
memo_pubkey = memo_key_obj.get_public_key()
|
|
1448
|
+
active_privkey = active_key_obj.get_private_key()
|
|
1449
|
+
posting_privkey = posting_key_obj.get_private_key()
|
|
1450
|
+
owner_privkey = owner_key_obj.get_private_key()
|
|
1451
|
+
memo_privkey = memo_key_obj.get_private_key()
|
|
1452
|
+
# store private keys
|
|
1453
|
+
try:
|
|
1454
|
+
if storekeys and not self.nobroadcast:
|
|
1455
|
+
if store_owner_key:
|
|
1456
|
+
self.wallet.addPrivateKey(str(owner_privkey))
|
|
1457
|
+
self.wallet.addPrivateKey(str(active_privkey))
|
|
1458
|
+
self.wallet.addPrivateKey(str(memo_privkey))
|
|
1459
|
+
self.wallet.addPrivateKey(str(posting_privkey))
|
|
1460
|
+
except ValueError as e:
|
|
1461
|
+
log.info(str(e))
|
|
1462
|
+
|
|
1463
|
+
elif owner_key and active_key and memo_key and posting_key:
|
|
1464
|
+
active_pubkey = PublicKey(active_key, prefix=self.prefix)
|
|
1465
|
+
owner_pubkey = PublicKey(owner_key, prefix=self.prefix)
|
|
1466
|
+
posting_pubkey = PublicKey(posting_key, prefix=self.prefix)
|
|
1467
|
+
memo_pubkey = PublicKey(memo_key, prefix=self.prefix)
|
|
1468
|
+
else:
|
|
1469
|
+
raise ValueError("Call incomplete! Provide either a password or public keys!")
|
|
1470
|
+
owner = format(owner_pubkey, self.prefix)
|
|
1471
|
+
active = format(active_pubkey, self.prefix)
|
|
1472
|
+
posting = format(posting_pubkey, self.prefix)
|
|
1473
|
+
memo = format(memo_pubkey, self.prefix)
|
|
1474
|
+
|
|
1475
|
+
owner_key_authority = [[owner, 1]]
|
|
1476
|
+
active_key_authority = [[active, 1]]
|
|
1477
|
+
posting_key_authority = [[posting, 1]]
|
|
1478
|
+
owner_accounts_authority = []
|
|
1479
|
+
active_accounts_authority = []
|
|
1480
|
+
posting_accounts_authority = []
|
|
1481
|
+
|
|
1482
|
+
additional_owner_keys = additional_owner_keys or []
|
|
1483
|
+
additional_active_keys = additional_active_keys or []
|
|
1484
|
+
additional_posting_keys = additional_posting_keys or []
|
|
1485
|
+
additional_owner_accounts = additional_owner_accounts or []
|
|
1486
|
+
additional_active_accounts = additional_active_accounts or []
|
|
1487
|
+
additional_posting_accounts = additional_posting_accounts or []
|
|
1488
|
+
|
|
1489
|
+
# additional authorities
|
|
1490
|
+
for k in additional_owner_keys:
|
|
1491
|
+
owner_key_authority.append([k, 1])
|
|
1492
|
+
for k in additional_active_keys:
|
|
1493
|
+
active_key_authority.append([k, 1])
|
|
1494
|
+
for k in additional_posting_keys:
|
|
1495
|
+
posting_key_authority.append([k, 1])
|
|
1496
|
+
|
|
1497
|
+
for k in additional_owner_accounts:
|
|
1498
|
+
addaccount = Account(k, blockchain_instance=self)
|
|
1499
|
+
owner_accounts_authority.append([addaccount["name"], 1])
|
|
1500
|
+
for k in additional_active_accounts:
|
|
1501
|
+
addaccount = Account(k, blockchain_instance=self)
|
|
1502
|
+
active_accounts_authority.append([addaccount["name"], 1])
|
|
1503
|
+
for k in additional_posting_accounts:
|
|
1504
|
+
addaccount = Account(k, blockchain_instance=self)
|
|
1505
|
+
posting_accounts_authority.append([addaccount["name"], 1])
|
|
1506
|
+
if combine_with_claim_account:
|
|
1507
|
+
op = {
|
|
1508
|
+
"fee": Amount(fee, blockchain_instance=self),
|
|
1509
|
+
"creator": creator["name"],
|
|
1510
|
+
"prefix": self.prefix,
|
|
1511
|
+
}
|
|
1512
|
+
op = operations.Claim_account(**op)
|
|
1513
|
+
ops = [op]
|
|
1514
|
+
op = {
|
|
1515
|
+
"creator": creator["name"],
|
|
1516
|
+
"new_account_name": account_name,
|
|
1517
|
+
"owner": {
|
|
1518
|
+
"account_auths": owner_accounts_authority,
|
|
1519
|
+
"key_auths": owner_key_authority,
|
|
1520
|
+
"address_auths": [],
|
|
1521
|
+
"weight_threshold": 1,
|
|
1522
|
+
},
|
|
1523
|
+
"active": {
|
|
1524
|
+
"account_auths": active_accounts_authority,
|
|
1525
|
+
"key_auths": active_key_authority,
|
|
1526
|
+
"address_auths": [],
|
|
1527
|
+
"weight_threshold": 1,
|
|
1528
|
+
},
|
|
1529
|
+
"posting": {
|
|
1530
|
+
"account_auths": posting_accounts_authority,
|
|
1531
|
+
"key_auths": posting_key_authority,
|
|
1532
|
+
"address_auths": [],
|
|
1533
|
+
"weight_threshold": 1,
|
|
1534
|
+
},
|
|
1535
|
+
"memo_key": memo,
|
|
1536
|
+
"json_metadata": json_meta or {},
|
|
1537
|
+
"prefix": self.prefix,
|
|
1538
|
+
}
|
|
1539
|
+
op = operations.Create_claimed_account(**op)
|
|
1540
|
+
if combine_with_claim_account:
|
|
1541
|
+
ops.append(op)
|
|
1542
|
+
return self.finalizeOp(ops, creator, "active", **kwargs)
|
|
1543
|
+
else:
|
|
1544
|
+
return self.finalizeOp(op, creator, "active", **kwargs)
|
|
1545
|
+
|
|
1546
|
+
def create_account(
|
|
1547
|
+
self,
|
|
1548
|
+
account_name: str,
|
|
1549
|
+
creator: str | Account | None = None,
|
|
1550
|
+
owner_key=None,
|
|
1551
|
+
active_key=None,
|
|
1552
|
+
memo_key=None,
|
|
1553
|
+
posting_key=None,
|
|
1554
|
+
password=None,
|
|
1555
|
+
additional_owner_keys=[],
|
|
1556
|
+
additional_active_keys=[],
|
|
1557
|
+
additional_posting_keys=[],
|
|
1558
|
+
additional_owner_accounts=[],
|
|
1559
|
+
additional_active_accounts=[],
|
|
1560
|
+
additional_posting_accounts=[],
|
|
1561
|
+
storekeys=True,
|
|
1562
|
+
store_owner_key=False,
|
|
1563
|
+
json_meta=None,
|
|
1564
|
+
**kwargs,
|
|
1565
|
+
):
|
|
1566
|
+
"""Create new account on Hive
|
|
1567
|
+
|
|
1568
|
+
The brainkey/password can be used to recover all generated keys
|
|
1569
|
+
(see :class:`nectargraphenebase.account` for more details).
|
|
1570
|
+
|
|
1571
|
+
By default, this call will use ``default_account`` to
|
|
1572
|
+
register a new name ``account_name`` with all keys being
|
|
1573
|
+
derived from a new brain key that will be returned. The
|
|
1574
|
+
corresponding keys will automatically be installed in the
|
|
1575
|
+
wallet.
|
|
1576
|
+
|
|
1577
|
+
.. warning:: Don't call this method unless you know what
|
|
1578
|
+
you are doing! Be sure to understand what this
|
|
1579
|
+
method does and where to find the private keys
|
|
1580
|
+
for your account.
|
|
1581
|
+
|
|
1582
|
+
.. note:: Please note that this imports private keys
|
|
1583
|
+
(if password is present) into the wallet by
|
|
1584
|
+
default when nobroadcast is set to False.
|
|
1585
|
+
However, it **does not import the owner
|
|
1586
|
+
key** for security reasons by default.
|
|
1587
|
+
If you set store_owner_key to True, the
|
|
1588
|
+
owner key is stored.
|
|
1589
|
+
Do NOT expect to be able to recover it from
|
|
1590
|
+
the wallet if you lose your password!
|
|
1591
|
+
|
|
1592
|
+
.. note:: Account creations cost a fee that is defined by
|
|
1593
|
+
the network. If you create an account, you will
|
|
1594
|
+
need to pay for that fee!
|
|
1595
|
+
|
|
1596
|
+
:param str account_name: (**required**) new account name
|
|
1597
|
+
:param str json_meta: Optional meta data for the account
|
|
1598
|
+
:param str owner_key: Main owner key
|
|
1599
|
+
:param str active_key: Main active key
|
|
1600
|
+
:param str posting_key: Main posting key
|
|
1601
|
+
:param str memo_key: Main memo_key
|
|
1602
|
+
:param str password: Alternatively to providing keys, one
|
|
1603
|
+
can provide a password from which the
|
|
1604
|
+
keys will be derived
|
|
1605
|
+
:param array additional_owner_keys: Additional owner public keys
|
|
1606
|
+
:param array additional_active_keys: Additional active public keys
|
|
1607
|
+
:param array additional_posting_keys: Additional posting public keys
|
|
1608
|
+
:param array additional_owner_accounts: Additional owner account
|
|
1609
|
+
names
|
|
1610
|
+
:param array additional_active_accounts: Additional active account
|
|
1611
|
+
names
|
|
1612
|
+
:param bool storekeys: Store new keys in the wallet (default:
|
|
1613
|
+
``True``)
|
|
1614
|
+
|
|
1615
|
+
:param str creator: which account should pay the registration fee
|
|
1616
|
+
(defaults to ``default_account``)
|
|
1617
|
+
:raises AccountExistsException: if the account already exists on
|
|
1618
|
+
the blockchain
|
|
1619
|
+
|
|
1620
|
+
"""
|
|
1621
|
+
if not creator and self.config["default_account"]:
|
|
1622
|
+
creator = self.config["default_account"]
|
|
1623
|
+
if not creator:
|
|
1624
|
+
raise ValueError(
|
|
1625
|
+
"Not creator account given. Define it with "
|
|
1626
|
+
+ "creator=x, or set the default_account using hive-nectar"
|
|
1627
|
+
)
|
|
1628
|
+
if password and (owner_key or active_key or memo_key):
|
|
1629
|
+
raise ValueError("You cannot use 'password' AND provide keys!")
|
|
1630
|
+
|
|
1631
|
+
try:
|
|
1632
|
+
Account(account_name, blockchain_instance=self)
|
|
1633
|
+
raise AccountExistsException
|
|
1634
|
+
except AccountDoesNotExistsException:
|
|
1635
|
+
pass
|
|
1636
|
+
|
|
1637
|
+
creator = Account(creator, blockchain_instance=self) # type: ignore[assignment]
|
|
1638
|
+
|
|
1639
|
+
" Generate new keys from password"
|
|
1640
|
+
from nectargraphenebase.account import PasswordKey
|
|
1641
|
+
|
|
1642
|
+
if password:
|
|
1643
|
+
active_key_obj = PasswordKey(account_name, password, role="active", prefix=self.prefix)
|
|
1644
|
+
owner_key_obj = PasswordKey(account_name, password, role="owner", prefix=self.prefix)
|
|
1645
|
+
posting_key_obj = PasswordKey(
|
|
1646
|
+
account_name, password, role="posting", prefix=self.prefix
|
|
1647
|
+
)
|
|
1648
|
+
memo_key_obj = PasswordKey(account_name, password, role="memo", prefix=self.prefix)
|
|
1649
|
+
active_pubkey = active_key_obj.get_public_key()
|
|
1650
|
+
owner_pubkey = owner_key_obj.get_public_key()
|
|
1651
|
+
posting_pubkey = posting_key_obj.get_public_key()
|
|
1652
|
+
memo_pubkey = memo_key_obj.get_public_key()
|
|
1653
|
+
active_privkey = active_key_obj.get_private_key()
|
|
1654
|
+
posting_privkey = posting_key_obj.get_private_key()
|
|
1655
|
+
owner_privkey = owner_key_obj.get_private_key()
|
|
1656
|
+
memo_privkey = memo_key_obj.get_private_key()
|
|
1657
|
+
# store private keys
|
|
1658
|
+
try:
|
|
1659
|
+
if storekeys and not self.nobroadcast:
|
|
1660
|
+
if store_owner_key:
|
|
1661
|
+
self.wallet.addPrivateKey(str(owner_privkey))
|
|
1662
|
+
self.wallet.addPrivateKey(str(active_privkey))
|
|
1663
|
+
self.wallet.addPrivateKey(str(memo_privkey))
|
|
1664
|
+
self.wallet.addPrivateKey(str(posting_privkey))
|
|
1665
|
+
except ValueError as e:
|
|
1666
|
+
log.info(str(e))
|
|
1667
|
+
|
|
1668
|
+
elif owner_key and active_key and memo_key and posting_key:
|
|
1669
|
+
active_pubkey = PublicKey(active_key, prefix=self.prefix)
|
|
1670
|
+
owner_pubkey = PublicKey(owner_key, prefix=self.prefix)
|
|
1671
|
+
posting_pubkey = PublicKey(posting_key, prefix=self.prefix)
|
|
1672
|
+
memo_pubkey = PublicKey(memo_key, prefix=self.prefix)
|
|
1673
|
+
else:
|
|
1674
|
+
raise ValueError("Call incomplete! Provide either a password or public keys!")
|
|
1675
|
+
owner = format(owner_pubkey, self.prefix)
|
|
1676
|
+
active = format(active_pubkey, self.prefix)
|
|
1677
|
+
posting = format(posting_pubkey, self.prefix)
|
|
1678
|
+
memo = format(memo_pubkey, self.prefix)
|
|
1679
|
+
|
|
1680
|
+
owner_key_authority = [[owner, 1]]
|
|
1681
|
+
active_key_authority = [[active, 1]]
|
|
1682
|
+
posting_key_authority = [[posting, 1]]
|
|
1683
|
+
owner_accounts_authority = []
|
|
1684
|
+
active_accounts_authority = []
|
|
1685
|
+
posting_accounts_authority = []
|
|
1686
|
+
|
|
1687
|
+
# additional authorities
|
|
1688
|
+
for k in additional_owner_keys:
|
|
1689
|
+
owner_key_authority.append([k, 1])
|
|
1690
|
+
for k in additional_active_keys:
|
|
1691
|
+
active_key_authority.append([k, 1])
|
|
1692
|
+
for k in additional_posting_keys:
|
|
1693
|
+
posting_key_authority.append([k, 1])
|
|
1694
|
+
|
|
1695
|
+
for k in additional_owner_accounts:
|
|
1696
|
+
addaccount = Account(k, blockchain_instance=self)
|
|
1697
|
+
owner_accounts_authority.append([addaccount["name"], 1])
|
|
1698
|
+
for k in additional_active_accounts:
|
|
1699
|
+
addaccount = Account(k, blockchain_instance=self)
|
|
1700
|
+
active_accounts_authority.append([addaccount["name"], 1])
|
|
1701
|
+
for k in additional_posting_accounts:
|
|
1702
|
+
addaccount = Account(k, blockchain_instance=self)
|
|
1703
|
+
posting_accounts_authority.append([addaccount["name"], 1])
|
|
1704
|
+
|
|
1705
|
+
props = self.get_chain_properties()
|
|
1706
|
+
try:
|
|
1707
|
+
hardfork_version = int(self.hardfork)
|
|
1708
|
+
except (ValueError, TypeError):
|
|
1709
|
+
hardfork_version = 0
|
|
1710
|
+
if hardfork_version >= 20:
|
|
1711
|
+
required_fee = Amount(props["account_creation_fee"], blockchain_instance=self)
|
|
1712
|
+
else:
|
|
1713
|
+
required_fee = Amount(props["account_creation_fee"], blockchain_instance=self) * 30
|
|
1714
|
+
op = {
|
|
1715
|
+
"fee": required_fee,
|
|
1716
|
+
"creator": creator["name"],
|
|
1717
|
+
"new_account_name": account_name,
|
|
1718
|
+
"owner": {
|
|
1719
|
+
"account_auths": owner_accounts_authority,
|
|
1720
|
+
"key_auths": owner_key_authority,
|
|
1721
|
+
"address_auths": [],
|
|
1722
|
+
"weight_threshold": 1,
|
|
1723
|
+
},
|
|
1724
|
+
"active": {
|
|
1725
|
+
"account_auths": active_accounts_authority,
|
|
1726
|
+
"key_auths": active_key_authority,
|
|
1727
|
+
"address_auths": [],
|
|
1728
|
+
"weight_threshold": 1,
|
|
1729
|
+
},
|
|
1730
|
+
"posting": {
|
|
1731
|
+
"account_auths": posting_accounts_authority,
|
|
1732
|
+
"key_auths": posting_key_authority,
|
|
1733
|
+
"address_auths": [],
|
|
1734
|
+
"weight_threshold": 1,
|
|
1735
|
+
},
|
|
1736
|
+
"memo_key": memo,
|
|
1737
|
+
"json_metadata": json_meta or {},
|
|
1738
|
+
"prefix": self.prefix,
|
|
1739
|
+
"json_str": True,
|
|
1740
|
+
}
|
|
1741
|
+
op = operations.Account_create(**op)
|
|
1742
|
+
return self.finalizeOp(op, creator, "active", **kwargs)
|
|
1743
|
+
|
|
1744
|
+
def update_account(
|
|
1745
|
+
self,
|
|
1746
|
+
account: str | Account | None = None,
|
|
1747
|
+
owner_key=None,
|
|
1748
|
+
active_key=None,
|
|
1749
|
+
memo_key=None,
|
|
1750
|
+
posting_key=None,
|
|
1751
|
+
password=None,
|
|
1752
|
+
additional_owner_keys: list[str] | None = None,
|
|
1753
|
+
additional_active_keys: list[str] | None = None,
|
|
1754
|
+
additional_posting_keys: list[str] | None = None,
|
|
1755
|
+
additional_owner_accounts: list[str] | None = None,
|
|
1756
|
+
additional_active_accounts: list[str] | None = None,
|
|
1757
|
+
additional_posting_accounts: list[str] | None = None,
|
|
1758
|
+
storekeys: bool = True,
|
|
1759
|
+
store_owner_key: bool = False,
|
|
1760
|
+
json_meta=None,
|
|
1761
|
+
**kwargs,
|
|
1762
|
+
):
|
|
1763
|
+
"""Update account
|
|
1764
|
+
|
|
1765
|
+
The brainkey/password can be used to recover all generated keys
|
|
1766
|
+
(see :class:`nectargraphenebase.account` for more details).
|
|
1767
|
+
|
|
1768
|
+
The
|
|
1769
|
+
corresponding keys will automatically be installed in the
|
|
1770
|
+
wallet.
|
|
1771
|
+
|
|
1772
|
+
.. warning:: Don't call this method unless you know what
|
|
1773
|
+
you are doing! Be sure to understand what this
|
|
1774
|
+
method does and where to find the private keys
|
|
1775
|
+
for your account.
|
|
1776
|
+
|
|
1777
|
+
.. note:: Please note that this imports private keys
|
|
1778
|
+
(if password is present) into the wallet by
|
|
1779
|
+
default when nobroadcast is set to False.
|
|
1780
|
+
However, it **does not import the owner
|
|
1781
|
+
key** for security reasons by default.
|
|
1782
|
+
If you set store_owner_key to True, the
|
|
1783
|
+
owner key is stored.
|
|
1784
|
+
Do NOT expect to be able to recover it from
|
|
1785
|
+
the wallet if you lose your password!
|
|
1786
|
+
|
|
1787
|
+
:param str account: (**required**) account name
|
|
1788
|
+
:param str json_meta: Optional updated meta data for the account
|
|
1789
|
+
:param str owner_key: Main owner (public) key
|
|
1790
|
+
:param str active_key: Main active (public) key
|
|
1791
|
+
:param str posting_key: Main posting (public) key
|
|
1792
|
+
:param str memo_key: Main memo (public) key
|
|
1793
|
+
:param str password: Alternatively to providing keys, one
|
|
1794
|
+
can provide a password from which the
|
|
1795
|
+
keys will be derived
|
|
1796
|
+
:param array additional_owner_keys: Additional owner public keys
|
|
1797
|
+
:param array additional_active_keys: Additional active public keys
|
|
1798
|
+
:param array additional_posting_keys: Additional posting public keys
|
|
1799
|
+
:param array additional_owner_accounts: Additional owner account
|
|
1800
|
+
names
|
|
1801
|
+
:param array additional_active_accounts: Additional active account
|
|
1802
|
+
names
|
|
1803
|
+
:param bool storekeys: Store new keys in the wallet (default:
|
|
1804
|
+
``True``)
|
|
1805
|
+
:raises AccountExistsException: if the account already exists on
|
|
1806
|
+
the blockchain
|
|
1807
|
+
|
|
1808
|
+
"""
|
|
1809
|
+
if password and (owner_key or active_key or memo_key):
|
|
1810
|
+
raise ValueError("You cannot use 'password' AND provide keys!")
|
|
1811
|
+
|
|
1812
|
+
account = Account(account, blockchain_instance=self) # type: ignore[assignment]
|
|
1813
|
+
|
|
1814
|
+
" Generate new keys from password"
|
|
1815
|
+
from nectargraphenebase.account import PasswordKey
|
|
1816
|
+
|
|
1817
|
+
if password:
|
|
1818
|
+
active_key = PasswordKey(account["name"], password, role="active", prefix=self.prefix)
|
|
1819
|
+
owner_key = PasswordKey(account["name"], password, role="owner", prefix=self.prefix)
|
|
1820
|
+
posting_key = PasswordKey(account["name"], password, role="posting", prefix=self.prefix)
|
|
1821
|
+
memo_key = PasswordKey(account["name"], password, role="memo", prefix=self.prefix)
|
|
1822
|
+
active_pubkey = active_key.get_public_key()
|
|
1823
|
+
owner_pubkey = owner_key.get_public_key()
|
|
1824
|
+
posting_pubkey = posting_key.get_public_key()
|
|
1825
|
+
memo_pubkey = memo_key.get_public_key()
|
|
1826
|
+
active_privkey = active_key.get_private_key()
|
|
1827
|
+
posting_privkey = posting_key.get_private_key()
|
|
1828
|
+
owner_privkey = owner_key.get_private_key()
|
|
1829
|
+
memo_privkey = memo_key.get_private_key()
|
|
1830
|
+
# store private keys
|
|
1831
|
+
try:
|
|
1832
|
+
if storekeys and not self.nobroadcast:
|
|
1833
|
+
if store_owner_key:
|
|
1834
|
+
self.wallet.addPrivateKey(str(owner_privkey))
|
|
1835
|
+
self.wallet.addPrivateKey(str(active_privkey))
|
|
1836
|
+
self.wallet.addPrivateKey(str(memo_privkey))
|
|
1837
|
+
self.wallet.addPrivateKey(str(posting_privkey))
|
|
1838
|
+
except ValueError as e:
|
|
1839
|
+
log.info(str(e))
|
|
1840
|
+
|
|
1841
|
+
elif owner_key and active_key and memo_key and posting_key:
|
|
1842
|
+
active_pubkey = PublicKey(active_key, prefix=self.prefix)
|
|
1843
|
+
owner_pubkey = PublicKey(owner_key, prefix=self.prefix)
|
|
1844
|
+
posting_pubkey = PublicKey(posting_key, prefix=self.prefix)
|
|
1845
|
+
memo_pubkey = PublicKey(memo_key, prefix=self.prefix)
|
|
1846
|
+
else:
|
|
1847
|
+
raise ValueError("Call incomplete! Provide either a password or public keys!")
|
|
1848
|
+
owner = format(owner_pubkey, self.prefix)
|
|
1849
|
+
active = format(active_pubkey, self.prefix)
|
|
1850
|
+
posting = format(posting_pubkey, self.prefix)
|
|
1851
|
+
memo = format(memo_pubkey, self.prefix)
|
|
1852
|
+
|
|
1853
|
+
owner_key_authority = [[owner, 1]]
|
|
1854
|
+
active_key_authority = [[active, 1]]
|
|
1855
|
+
posting_key_authority = [[posting, 1]]
|
|
1856
|
+
if additional_owner_accounts is None:
|
|
1857
|
+
owner_accounts_authority = account["owner"]["account_auths"]
|
|
1858
|
+
else:
|
|
1859
|
+
owner_accounts_authority = []
|
|
1860
|
+
if additional_active_accounts is None:
|
|
1861
|
+
active_accounts_authority = account["active"]["account_auths"]
|
|
1862
|
+
else:
|
|
1863
|
+
active_accounts_authority = []
|
|
1864
|
+
if additional_posting_accounts is None:
|
|
1865
|
+
posting_accounts_authority = account["posting"]["account_auths"]
|
|
1866
|
+
else:
|
|
1867
|
+
posting_accounts_authority = []
|
|
1868
|
+
|
|
1869
|
+
# additional authorities
|
|
1870
|
+
if additional_owner_keys is not None:
|
|
1871
|
+
for k in additional_owner_keys:
|
|
1872
|
+
owner_key_authority.append([k, 1])
|
|
1873
|
+
if additional_active_keys is not None:
|
|
1874
|
+
for k in additional_active_keys:
|
|
1875
|
+
active_key_authority.append([k, 1])
|
|
1876
|
+
if additional_posting_keys is not None:
|
|
1877
|
+
for k in additional_posting_keys:
|
|
1878
|
+
posting_key_authority.append([k, 1])
|
|
1879
|
+
|
|
1880
|
+
if additional_owner_accounts is not None:
|
|
1881
|
+
for k in additional_owner_accounts:
|
|
1882
|
+
addaccount = Account(k, blockchain_instance=self)
|
|
1883
|
+
owner_accounts_authority.append([addaccount["name"], 1])
|
|
1884
|
+
if additional_active_accounts is not None:
|
|
1885
|
+
for k in additional_active_accounts:
|
|
1886
|
+
addaccount = Account(k, blockchain_instance=self)
|
|
1887
|
+
active_accounts_authority.append([addaccount["name"], 1])
|
|
1888
|
+
if additional_posting_accounts is not None:
|
|
1889
|
+
for k in additional_posting_accounts:
|
|
1890
|
+
addaccount = Account(k, blockchain_instance=self)
|
|
1891
|
+
posting_accounts_authority.append([addaccount["name"], 1])
|
|
1892
|
+
op = {
|
|
1893
|
+
"account": account["name"],
|
|
1894
|
+
"owner": {
|
|
1895
|
+
"account_auths": owner_accounts_authority,
|
|
1896
|
+
"key_auths": owner_key_authority,
|
|
1897
|
+
"address_auths": [],
|
|
1898
|
+
"weight_threshold": 1,
|
|
1899
|
+
},
|
|
1900
|
+
"active": {
|
|
1901
|
+
"account_auths": active_accounts_authority,
|
|
1902
|
+
"key_auths": active_key_authority,
|
|
1903
|
+
"address_auths": [],
|
|
1904
|
+
"weight_threshold": 1,
|
|
1905
|
+
},
|
|
1906
|
+
"posting": {
|
|
1907
|
+
"account_auths": posting_accounts_authority,
|
|
1908
|
+
"key_auths": posting_key_authority,
|
|
1909
|
+
"address_auths": [],
|
|
1910
|
+
"weight_threshold": 1,
|
|
1911
|
+
},
|
|
1912
|
+
"memo_key": memo,
|
|
1913
|
+
"json_metadata": json_meta or account["json_metadata"],
|
|
1914
|
+
"prefix": self.prefix,
|
|
1915
|
+
}
|
|
1916
|
+
op = operations.Account_update(**op)
|
|
1917
|
+
return self.finalizeOp(op, account, "owner", **kwargs)
|
|
1918
|
+
|
|
1919
|
+
def witness_set_properties(
|
|
1920
|
+
self, wif: str, owner: str | Account, props: dict[str, Any]
|
|
1921
|
+
) -> dict[str, Any]:
|
|
1922
|
+
"""Set witness properties
|
|
1923
|
+
|
|
1924
|
+
:param str wif: Private signing key
|
|
1925
|
+
:param dict props: Properties
|
|
1926
|
+
:param str owner: witness account name
|
|
1927
|
+
|
|
1928
|
+
Properties::
|
|
1929
|
+
|
|
1930
|
+
{
|
|
1931
|
+
"account_creation_fee": x,
|
|
1932
|
+
"account_subsidy_budget": x,
|
|
1933
|
+
"account_subsidy_decay": x,
|
|
1934
|
+
"maximum_block_size": x,
|
|
1935
|
+
"url": x,
|
|
1936
|
+
"sbd_exchange_rate": x,
|
|
1937
|
+
"sbd_interest_rate": x,
|
|
1938
|
+
"new_signing_key": x
|
|
1939
|
+
}
|
|
1940
|
+
|
|
1941
|
+
"""
|
|
1942
|
+
|
|
1943
|
+
owner = Account(owner, blockchain_instance=self)
|
|
1944
|
+
|
|
1945
|
+
try:
|
|
1946
|
+
PrivateKey(wif, prefix=self.prefix)
|
|
1947
|
+
except Exception as e:
|
|
1948
|
+
raise e
|
|
1949
|
+
props_list = [["key", repr(PrivateKey(wif, prefix=self.prefix).pubkey)]]
|
|
1950
|
+
for k in props:
|
|
1951
|
+
props_list.append([k, props[k]])
|
|
1952
|
+
op = operations.Witness_set_properties(
|
|
1953
|
+
{
|
|
1954
|
+
"owner": owner["name"],
|
|
1955
|
+
"props": props_list,
|
|
1956
|
+
"prefix": self.prefix,
|
|
1957
|
+
"json_str": True,
|
|
1958
|
+
}
|
|
1959
|
+
)
|
|
1960
|
+
tb = TransactionBuilder(blockchain_instance=self)
|
|
1961
|
+
tb.appendOps([op])
|
|
1962
|
+
tb.appendWif(wif)
|
|
1963
|
+
tb.sign()
|
|
1964
|
+
return tb.broadcast()
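A sketch of the witness_set_properties call above; the props keys mirror the docstring, the WIF and witness name are placeholders, and the `nobroadcast` constructor flag is an assumption:

    from nectar.hive import Hive

    hv = Hive(nobroadcast=True)   # assumed flag: do not actually send

    props = {
        "account_creation_fee": "3.000 HIVE",
        "maximum_block_size": 65536,
        "url": "https://example.com/witness",
    }
    hv.witness_set_properties("<signing-wif-placeholder>", "some-witness", props)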
|
|
1965
|
+
|
|
1966
|
+
def witness_update(
|
|
1967
|
+
self,
|
|
1968
|
+
signing_key: str,
|
|
1969
|
+
url: str,
|
|
1970
|
+
props: dict[str, Any],
|
|
1971
|
+
account: str | Account | None = None,
|
|
1972
|
+
**kwargs: Any,
|
|
1973
|
+
) -> dict[str, Any]:
|
|
1974
|
+
"""
|
|
1975
|
+
Create or update a witness (register or modify a block producer).
|
|
1976
|
+
|
|
1977
|
+
Creates a Witness_update operation for the given account with the provided signing key, node URL, and witness properties, then finalizes (signs/broadcasts or returns) the operation via the transaction pipeline.
|
|
1978
|
+
|
|
1979
|
+
Parameters:
|
|
1980
|
+
signing_key (str): Witness block signing public key (must be valid for the chain prefix).
|
|
1981
|
+
url (str): URL for the witness (website or endpoint).
|
|
1982
|
+
props (dict): Witness properties, e.g.:
|
|
1983
|
+
{
|
|
1984
|
+
"account_creation_fee": "3.000 HIVE",
|
|
1985
|
+
"maximum_block_size": 65536,
|
|
1986
|
+
"hbd_interest_rate": 0,
|
|
1987
|
+
}
|
|
1988
|
+
The "account_creation_fee" value will be converted to an Amount if present.
|
|
1989
|
+
account (str, optional): Witness account name. If omitted, the instance default_account config is used.
|
|
1990
|
+
|
|
1991
|
+
Returns:
|
|
1992
|
+
The value returned by finalizeOp (typically a transaction/broadcast result or a transaction builder when unsigned/bundled).
|
|
1993
|
+
|
|
1994
|
+
Raises:
|
|
1995
|
+
ValueError: If no account is provided or resolvable.
|
|
1996
|
+
Exception: If the signing_key is not a valid public key for the chain prefix (propagates the underlying PublicKey error).
|
|
1997
|
+
"""
|
|
1998
|
+
if not account and self.config["default_account"]:
|
|
1999
|
+
account = self.config["default_account"]
|
|
2000
|
+
if not account:
|
|
2001
|
+
raise ValueError("You need to provide an account")
|
|
2002
|
+
|
|
2003
|
+
account = Account(account, blockchain_instance=self) # type: ignore[assignment]
|
|
2004
|
+
|
|
2005
|
+
try:
|
|
2006
|
+
PublicKey(signing_key, prefix=self.prefix)
|
|
2007
|
+
except Exception as e:
|
|
2008
|
+
raise e
|
|
2009
|
+
if "account_creation_fee" in props:
|
|
2010
|
+
props["account_creation_fee"] = Amount(
|
|
2011
|
+
props["account_creation_fee"], blockchain_instance=self, json_str=True
|
|
2012
|
+
)
|
|
2013
|
+
op = operations.Witness_update(
|
|
2014
|
+
**{
|
|
2015
|
+
"owner": account["name"],
|
|
2016
|
+
"url": url,
|
|
2017
|
+
"block_signing_key": signing_key,
|
|
2018
|
+
"props": props,
|
|
2019
|
+
"fee": Amount(0, self.token_symbol, blockchain_instance=self, json_str=True),
|
|
2020
|
+
"prefix": self.prefix,
|
|
2021
|
+
"json_str": True,
|
|
2022
|
+
}
|
|
2023
|
+
)
|
|
2024
|
+
return self.finalizeOp(op, account, "active", **kwargs)
|
|
2025
|
+
|
|
2026
|
+
def update_proposal_votes(
|
|
2027
|
+
self,
|
|
2028
|
+
proposal_ids: list[int],
|
|
2029
|
+
approve: bool,
|
|
2030
|
+
account: str | Account | None = None,
|
|
2031
|
+
**kwargs: Any,
|
|
2032
|
+
) -> dict[str, Any]:
|
|
2033
|
+
"""Update proposal votes
|
|
2034
|
+
|
|
2035
|
+
:param list proposal_ids: list of proposal ids
|
|
2036
|
+
:param bool approve: True/False
|
|
2037
|
+
:param str account: (optional) voting account name
|
|
2038
|
+
|
|
2039
|
+
|
|
2040
|
+
"""
|
|
2041
|
+
if not account and self.config["default_account"]:
|
|
2042
|
+
account = self.config["default_account"]
|
|
2043
|
+
if not account:
|
|
2044
|
+
raise ValueError("You need to provide an account")
|
|
2045
|
+
|
|
2046
|
+
account = Account(account, blockchain_instance=self) # type: ignore[assignment]
|
|
2047
|
+
if not isinstance(proposal_ids, list):
|
|
2048
|
+
proposal_ids = [proposal_ids]
|
|
2049
|
+
|
|
2050
|
+
op = operations.Update_proposal_votes(
|
|
2051
|
+
**{
|
|
2052
|
+
"voter": account["name"],
|
|
2053
|
+
"proposal_ids": proposal_ids,
|
|
2054
|
+
"approve": approve,
|
|
2055
|
+
"prefix": self.prefix,
|
|
2056
|
+
}
|
|
2057
|
+
)
|
|
2058
|
+
return self.finalizeOp(op, account, "active", **kwargs)
|
|
2059
|
+
|
|
2060
|
+
def _test_weights_treshold(self, authority: dict[str, Any]) -> bool:
|
|
2061
|
+
"""This method raises an error if the threshold of an authority cannot
|
|
2062
|
+
be reached by the weights.
|
|
2063
|
+
|
|
2064
|
+
:param dict authority: An authority of an account
|
|
2065
|
+
:raises ValueError: if the threshold is set too high
|
|
2066
|
+
"""
|
|
2067
|
+
weights = 0
|
|
2068
|
+
for a in authority["account_auths"]:
|
|
2069
|
+
weights += int(a[1])
|
|
2070
|
+
for a in authority["key_auths"]:
|
|
2071
|
+
weights += int(a[1])
|
|
2072
|
+
if authority["weight_threshold"] > weights:
|
|
2073
|
+
raise ValueError("Threshold too restrictive!")
|
|
2074
|
+
if authority["weight_threshold"] == 0:
|
|
2075
|
+
raise ValueError("Cannot have threshold of 0")
|
|
2076
|
+
return True
|
|
2077
|
+
|
|
2078
|
+
def custom_json(
|
|
2079
|
+
self,
|
|
2080
|
+
id: str,
|
|
2081
|
+
json_data: Any,
|
|
2082
|
+
required_auths: list[str] = [],
|
|
2083
|
+
required_posting_auths: list[str] = [],
|
|
2084
|
+
**kwargs,
|
|
2085
|
+
) -> dict[str, Any]:
|
|
2086
|
+
"""
|
|
2087
|
+
Create and submit a Custom_json operation.
|
|
2088
|
+
|
|
2089
|
+
Parameters:
|
|
2090
|
+
id (str): Identifier for the custom JSON (max 32 bytes).
|
|
2091
|
+
json_data: JSON-serializable payload to include in the operation.
|
|
2092
|
+
required_auths (list): Accounts that must authorize with active permission. If non-empty, the operation will be finalized using active permission.
|
|
2093
|
+
required_posting_auths (list): Accounts that must authorize with posting permission. Used when `required_auths` is empty.
|
|
2094
|
+
|
|
2095
|
+
Returns:
|
|
2096
|
+
The result returned by finalizeOp (signed and/or broadcast transaction), which may vary based on the instance configuration (e.g., unsigned, nobroadcast, bundle).
|
|
2097
|
+
|
|
2098
|
+
Raises:
|
|
2099
|
+
Exception: If neither `required_auths` nor `required_posting_auths` contains an account.
|
|
2100
|
+
"""
|
|
2101
|
+
account = None
|
|
2102
|
+
if len(required_auths):
|
|
2103
|
+
account = required_auths[0]
|
|
2104
|
+
elif len(required_posting_auths):
|
|
2105
|
+
account = required_posting_auths[0]
|
|
2106
|
+
else:
|
|
2107
|
+
raise Exception("At least one account needs to be specified")
|
|
2108
|
+
account = Account(account, full=False, blockchain_instance=self)
|
|
2109
|
+
op = operations.Custom_json(
|
|
2110
|
+
**{
|
|
2111
|
+
"json": json_data,
|
|
2112
|
+
"required_auths": required_auths,
|
|
2113
|
+
"required_posting_auths": required_posting_auths,
|
|
2114
|
+
"id": id,
|
|
2115
|
+
"prefix": self.prefix,
|
|
2116
|
+
"appbase": True,
|
|
2117
|
+
}
|
|
2118
|
+
)
|
|
2119
|
+
if len(required_auths) > 0:
|
|
2120
|
+
return self.finalizeOp(op, account, "active", **kwargs)
|
|
2121
|
+
else:
|
|
2122
|
+
return self.finalizeOp(op, account, "posting", **kwargs)
|
|
2123
|
+
|
|
2124
|
+
def post(
|
|
2125
|
+
self,
|
|
2126
|
+
title: str,
|
|
2127
|
+
body: str,
|
|
2128
|
+
author: str | None = None,
|
|
2129
|
+
permlink: str | None = None,
|
|
2130
|
+
reply_identifier: str | None = None,
|
|
2131
|
+
json_metadata: dict[str, Any] | None = None,
|
|
2132
|
+
comment_options: dict[str, Any] | None = None,
|
|
2133
|
+
community: str | None = None,
|
|
2134
|
+
app: str | None = None,
|
|
2135
|
+
tags: list[str] | None = None,
|
|
2136
|
+
beneficiaries: list[dict[str, Any]] | None = None,
|
|
2137
|
+
self_vote: bool = False,
|
|
2138
|
+
parse_body: bool = False,
|
|
2139
|
+
**kwargs,
|
|
2140
|
+
) -> dict[str, Any]:
|
|
2141
|
+
"""Create a new post.
|
|
2142
|
+
If this post is intended as a reply/comment, `reply_identifier` needs
|
|
2143
|
+
to be set with the identifier of the parent post/comment (eg.
|
|
2144
|
+
`@author/permlink`).
|
|
2145
|
+
Optionally you can also set json_metadata, comment_options and upvote
|
|
2146
|
+
the newly created post as an author.
|
|
2147
|
+
Setting category, tags or community will override the values provided
|
|
2148
|
+
in json_metadata and/or comment_options where appropriate.
|
|
2149
|
+
|
|
2150
|
+
:param str title: Title of the post
|
|
2151
|
+
:param str body: Body of the post/comment
|
|
2152
|
+
:param str author: Account you are posting from
|
|
2153
|
+
:param str permlink: Manually set the permlink (defaults to None).
|
|
2154
|
+
If left empty, it will be derived from title automatically.
|
|
2155
|
+
:param str reply_identifier: Identifier of the parent post/comment (only
|
|
2156
|
+
if this post is a reply/comment).
|
|
2157
|
+
:param json_metadata: JSON meta object that can be attached to
|
|
2158
|
+
the post.
|
|
2159
|
+
:type json_metadata: str, dict
|
|
2160
|
+
:param dict comment_options: JSON options object that can be
|
|
2161
|
+
attached to the post.
|
|
2162
|
+
|
|
2163
|
+
Example::
|
|
2164
|
+
|
|
2165
|
+
comment_options = {
|
|
2166
|
+
'max_accepted_payout': '1000000.000 HBD',
|
|
2167
|
+
'percent_hbd': 10000,
|
|
2168
|
+
'allow_votes': True,
|
|
2169
|
+
'allow_curation_rewards': True,
|
|
2170
|
+
'extensions': [[0, {
|
|
2171
|
+
'beneficiaries': [
|
|
2172
|
+
{'account': 'account1', 'weight': 5000},
|
|
2173
|
+
{'account': 'account2', 'weight': 5000},
|
|
2174
|
+
]}
|
|
2175
|
+
]]
|
|
2176
|
+
}
|
|
2177
|
+
|
|
2178
|
+
:param str community: (Optional) Name of the community we are posting
|
|
2179
|
+
into. This will also override the community specified in
|
|
2180
|
+
`json_metadata` and the category
|
|
2181
|
+
:param str app: (Optional) Name of the app used for posting.
|
|
2182
|
+
When not set, nectar/<version> is used.
|
|
2183
|
+
:param tags: (Optional) A list of tags to go with the
|
|
2184
|
+
post. This will also override the tags specified in
|
|
2185
|
+
`json_metadata`. The first tag will be used as a 'category' when community is not specified. If
|
|
2186
|
+
provided as a string, it should be space separated.
|
|
2187
|
+
:type tags: str, list
|
|
2188
|
+
:param list beneficiaries: (Optional) A list of beneficiaries
|
|
2189
|
+
for posting reward distribution. This argument overrides
|
|
2190
|
+
beneficiaries as specified in `comment_options`.
|
|
2191
|
+
|
|
2192
|
+
For example, if we would like to split rewards between account1 and
|
|
2193
|
+
account2::
|
|
2194
|
+
|
|
2195
|
+
beneficiaries = [
|
|
2196
|
+
{'account': 'account1', 'weight': 5000},
|
|
2197
|
+
{'account': 'account2', 'weight': 5000}
|
|
2198
|
+
]
|
|
2199
|
+
|
|
2200
|
+
:param bool self_vote: (Optional) Upvote the post as author, right after
|
|
2201
|
+
posting.
|
|
2202
|
+
:param bool parse_body: (Optional) When set to True, all mentioned users,
|
|
2203
|
+
used links and images are put into users, links and images array inside
|
|
2204
|
+
json_metadata. This will override provided links, images and users inside
|
|
2205
|
+
json_metadata. Hashtags will be added to tags until the tag list holds five entries.
|
|
2206
|
+
|
|
2207
|
+
"""
|
|
2208
|
+
|
|
2209
|
+
# prepare json_metadata
|
|
2210
|
+
json_metadata = json_metadata or {}
|
|
2211
|
+
if isinstance(json_metadata, str):
|
|
2212
|
+
json_metadata = json.loads(json_metadata)
|
|
2213
|
+
|
|
2214
|
+
# override the community
|
|
2215
|
+
if community:
|
|
2216
|
+
json_metadata.update({"community": community})
|
|
2217
|
+
if app:
|
|
2218
|
+
json_metadata.update({"app": app})
|
|
2219
|
+
elif "app" not in json_metadata:
|
|
2220
|
+
json_metadata.update({"app": "nectar/%s" % (nectar_version)})
|
|
2221
|
+
|
|
2222
|
+
if not author and self.config["default_account"]:
|
|
2223
|
+
author = self.config["default_account"]
|
|
2224
|
+
if not author:
|
|
2225
|
+
raise ValueError("You need to provide an account")
|
|
2226
|
+
account = Account(author, blockchain_instance=self)
|
|
2227
|
+
# deal with the category and tags
|
|
2228
|
+
if isinstance(tags, str):
|
|
2229
|
+
tags = list({_f for _f in (re.split(r"[\W_]", tags)) if _f})
|
|
2230
|
+
|
|
2231
|
+
tags = tags or json_metadata.get("tags", [])
|
|
2232
|
+
|
|
2233
|
+
if parse_body:
|
|
2234
|
+
|
|
2235
|
+
def get_urls(mdstring: str) -> list[str]:
|
|
2236
|
+
urls = re.findall(r'http[s]*://[^\s"><\)\(]+', mdstring)
|
|
2237
|
+
return list(dict.fromkeys(urls))
|
|
2238
|
+
|
|
2239
|
+
def get_users(mdstring: str) -> list[str]:
|
|
2240
|
+
"""
|
|
2241
|
+
Extract usernames mentioned in a Markdown string.
|
|
2242
|
+
|
|
2243
|
+
Searches mdstring for @-mentions (ASCII @ or fullwidth @) and returns the usernames found in order of appearance.
|
|
2244
|
+
Usernames must start with a lowercase ASCII letter, may contain lowercase letters, digits, hyphens, dots (including fullwidth dot), and must end with a letter or digit.
|
|
2245
|
+
|
|
2246
|
+
Parameters:
|
|
2247
|
+
mdstring (str): Text to scan for @-mentions.
|
|
2248
|
+
|
|
2249
|
+
Returns:
|
|
2250
|
+
list[str]: List of matched username strings in the order they were found (may contain duplicates).
|
|
2251
|
+
"""
|
|
2252
|
+
users = []
|
|
2253
|
+
for u in re.findall(
|
|
2254
|
+
r"(^|[^a-zA-Z0-9_!#$%&*@@/]|(^|[^a-zA-Z0-9_+~.-/#]))[@@]([a-z][-.a-z\d]+[a-z\d])",
|
|
2255
|
+
mdstring,
|
|
2256
|
+
):
|
|
2257
|
+
users.append(list(u)[-1])
|
|
2258
|
+
return users
|
|
2259
|
+
|
|
2260
|
+
def get_hashtags(mdstring: str) -> list[str]:
|
|
2261
|
+
hashtags = []
|
|
2262
|
+
for t in re.findall(r"(^|\s)(#[-a-z\d]+)", mdstring):
|
|
2263
|
+
hashtags.append(list(t)[-1])
|
|
2264
|
+
return hashtags
|
|
2265
|
+
|
|
2266
|
+
users = []
|
|
2267
|
+
image = []
|
|
2268
|
+
links = []
|
|
2269
|
+
for url in get_urls(body):
|
|
2270
|
+
img_exts = [".jpg", ".png", ".gif", ".svg", ".jpeg"]
|
|
2271
|
+
if os.path.splitext(url)[1].lower() in img_exts:
|
|
2272
|
+
image.append(url)
|
|
2273
|
+
elif url[:25] == "https://images.hive.blog/":
|
|
2274
|
+
image.append(url)
|
|
2275
|
+
else:
|
|
2276
|
+
links.append(url)
|
|
2277
|
+
users = get_users(body)
|
|
2278
|
+
hashtags = get_hashtags(body)
|
|
2279
|
+
users = list(set(users).difference({author}))
|
|
2280
|
+
if len(users) > 0:
|
|
2281
|
+
json_metadata.update({"users": users})
|
|
2282
|
+
if len(image) > 0:
|
|
2283
|
+
json_metadata.update({"image": image})
|
|
2284
|
+
if len(links) > 0:
|
|
2285
|
+
json_metadata.update({"links": links})
|
|
2286
|
+
if len(tags) < 5:
|
|
2287
|
+
for i in range(5 - len(tags)):
|
|
2288
|
+
if len(hashtags) > i:
|
|
2289
|
+
tags.append(hashtags[i])
|
|
2290
|
+
|
|
2291
|
+
if tags:
|
|
2292
|
+
# first tag should be a category
|
|
2293
|
+
if community is None:
|
|
2294
|
+
category = tags[0]
|
|
2295
|
+
else:
|
|
2296
|
+
category = community
|
|
2297
|
+
json_metadata.update({"tags": tags})
|
|
2298
|
+
elif community:
|
|
2299
|
+
category = community
|
|
2300
|
+
else:
|
|
2301
|
+
category = None
|
|
2302
|
+
|
|
2303
|
+
# can't provide a category while replying to a post
|
|
2304
|
+
if reply_identifier and category:
|
|
2305
|
+
category = None
|
|
2306
|
+
|
|
2307
|
+
# deal with replies/categories
|
|
2308
|
+
if reply_identifier:
|
|
2309
|
+
parent_author, parent_permlink = resolve_authorperm(reply_identifier)
|
|
2310
|
+
if not permlink:
|
|
2311
|
+
permlink = derive_permlink(title, parent_permlink)
|
|
2312
|
+
elif category:
|
|
2313
|
+
parent_permlink = sanitize_permlink(category)
|
|
2314
|
+
parent_author = ""
|
|
2315
|
+
if not permlink:
|
|
2316
|
+
permlink = derive_permlink(title)
|
|
2317
|
+
else:
|
|
2318
|
+
parent_author = ""
|
|
2319
|
+
parent_permlink = ""
|
|
2320
|
+
if not permlink:
|
|
2321
|
+
permlink = derive_permlink(title)
|
|
2322
|
+
|
|
2323
|
+
post_op = operations.Comment(
|
|
2324
|
+
**{
|
|
2325
|
+
"parent_author": parent_author.strip(),
|
|
2326
|
+
"parent_permlink": parent_permlink.strip(),
|
|
2327
|
+
"author": account["name"] or "",
|
|
2328
|
+
"permlink": permlink.strip() if permlink else "",
|
|
2329
|
+
"title": title.strip() if title else "",
|
|
2330
|
+
"body": body,
|
|
2331
|
+
"json_metadata": json_metadata,
|
|
2332
|
+
}
|
|
2333
|
+
)
|
|
2334
|
+
ops = [post_op]
|
|
2335
|
+
|
|
2336
|
+
# if comment_options are used, add a new op to the transaction
|
|
2337
|
+
if comment_options or beneficiaries:
|
|
2338
|
+
comment_op = self._build_comment_options_op(
|
|
2339
|
+
account["name"] or "",
|
|
2340
|
+
permlink or "",
|
|
2341
|
+
comment_options or {},
|
|
2342
|
+
beneficiaries or [],
|
|
2343
|
+
)
|
|
2344
|
+
ops.append(comment_op)
|
|
2345
|
+
|
|
2346
|
+
if self_vote:
|
|
2347
|
+
vote_op = operations.Vote(
|
|
2348
|
+
**{
|
|
2349
|
+
"voter": account["name"] or "",
|
|
2350
|
+
"author": account["name"] or "",
|
|
2351
|
+
"permlink": permlink or "",
|
|
2352
|
+
"weight": HIVE_100_PERCENT,
|
|
2353
|
+
}
|
|
2354
|
+
)
|
|
2355
|
+
ops.append(vote_op)
|
|
2356
|
+
|
|
2357
|
+
return self.finalizeOp(ops, account, "posting", **kwargs)
|
|
2358
|
+
+    def vote(
+        self,
+        weight: float,
+        identifier: str,
+        account: str | Account | None = None,
+        **kwargs,
+    ) -> dict[str, Any]:
+        """
+        Cast a vote on a post.
+
+        Parameters:
+            weight (float): Vote weight in percent, range -100.0 to 100.0. This is
+                converted to the chain's internal weight units (multiplied by
+                HIVE_1_PERCENT) and clamped to the allowed range.
+            identifier (str): Post identifier in the form "@author/permlink".
+            account (str, optional): Name of the account to use for voting. If not
+                provided, the instance's `default_account` from config is used. A
+                ValueError is raised if no account can be determined.
+
+        Returns:
+            The result from finalizeOp (operation signing/broadcast buffer or broadcast
+            response) after creating a Vote operation using posting permission.
+        """
+        if not account:
+            if "default_account" in self.config:
+                account = self.config["default_account"]
+        if not account:
+            raise ValueError("You need to provide an account")
+        account = Account(account, blockchain_instance=self)  # type: ignore[assignment]
+
+        [post_author, post_permlink] = resolve_authorperm(identifier)
+
+        vote_weight = int(float(weight) * HIVE_1_PERCENT)
+        if vote_weight > HIVE_100_PERCENT:
+            vote_weight = HIVE_100_PERCENT
+        if vote_weight < -HIVE_100_PERCENT:
+            vote_weight = -HIVE_100_PERCENT
+
+        op = operations.Vote(
+            **{
+                "voter": account["name"] or "",
+                "author": post_author,
+                "permlink": post_permlink,
+                "weight": vote_weight,
+            }
+        )
+
+        return self.finalizeOp(op, account, "posting", **kwargs)
+
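A minimal, self-contained sketch of the weight conversion performed in vote() above, assuming the standard Hive basis-point constants (HIVE_1_PERCENT = 100, HIVE_100_PERCENT = 10000, presumably defined in nectar/constants.py):

    HIVE_1_PERCENT = 100
    HIVE_100_PERCENT = 10000

    def to_vote_weight(percent: float) -> int:
        """Convert a human-readable percentage (-100.0..100.0) to chain weight units."""
        weight = int(float(percent) * HIVE_1_PERCENT)
        # clamp to the allowed range, mirroring vote()
        return max(-HIVE_100_PERCENT, min(HIVE_100_PERCENT, weight))

    assert to_vote_weight(100.0) == 10000   # full upvote
    assert to_vote_weight(-50.0) == -5000   # 50% downvote
    assert to_vote_weight(250.0) == 10000   # out-of-range input is clamped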
+    def comment_options(
+        self,
+        options: dict[str, Any],
+        identifier: str,
+        beneficiaries: list[dict[str, Any]] = [],
+        account: str | Account | None = None,
+        **kwargs: Any,
+    ) -> dict[str, Any]:
+        """
+        Set comment/post options for a post (Comment_options operation) and submit the operation.
+
+        Parameters:
+            options (dict): Comment options to set. Common keys include:
+                - max_accepted_payout (str): e.g. "1000000.000 HBD"
+                - percent_hbd (int): e.g. 10000 for 100%
+                - allow_votes (bool)
+                - allow_curation_rewards (bool)
+                Other valid keys accepted by the chain's Comment_options operation are supported.
+            identifier (str): Post identifier in the form "author/permlink" or a permlink for the default author.
+            beneficiaries (list): Optional list of beneficiaries (each entry typically a dict with `account` and `weight`).
+            account (str): Account that authorizes this operation; defaults to the instance's `default_account` if not provided.
+            **kwargs: Additional keyword arguments forwarded to finalizeOp (e.g., broadcast/signing options).
+
+        Returns:
+            The result of finalizeOp for the created Comment_options operation (signed/broadcasted transaction or unsigned buffer), depending on instance configuration.
+
+        Raises:
+            ValueError: If no account is provided and no default account is configured.
+        """
+        if not account and self.config["default_account"]:
+            account = self.config["default_account"]
+        if not account:
+            raise ValueError("You need to provide an account")
+        account = Account(account, blockchain_instance=self)  # type: ignore[assignment]
+        author, permlink = resolve_authorperm(identifier)
+        op = self._build_comment_options_op(author, permlink, options, beneficiaries)
+        return self.finalizeOp(op, account, "posting", **kwargs)
+
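A hypothetical use of comment_options() above, for example to decline payout on an existing post; the account, permlink and key are placeholders, and a beem-style Hive constructor from nectar.hive is assumed:

    from nectar.hive import Hive

    hive = Hive(nobroadcast=True, keys=["<posting-wif>"])

    hive.comment_options(
        {
            "max_accepted_payout": "0.000 HBD",  # decline any payout
            "percent_hbd": 0,                    # if payout were accepted: fully powered up
            "allow_votes": True,
            "allow_curation_rewards": True,
        },
        "@alice/my-first-post",
        account="alice",
    )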
+    def _build_comment_options_op(
+        self,
+        author: str,
+        permlink: str,
+        options: dict[str, Any],
+        beneficiaries: list[dict[str, Any]],
+    ) -> Any:
+        """
+        Build and return a Comment_options operation for a post, validating and normalizing provided options and beneficiaries.
+
+        Parameters:
+            author (str): The post author's account name.
+            permlink (str): The permlink of the post to set options for.
+            options (dict): Optional comment options; supported keys include
+                "max_accepted_payout", "percent_hbd", "allow_votes",
+                "allow_curation_rewards", and "extensions". Keys not listed are removed.
+            beneficiaries (list): Optional list of beneficiary dicts, each with
+                "account" (str) and optional "weight" (int, 1..HIVE_100_PERCENT). If provided,
+                beneficiaries override any beneficiaries in `options`.
+
+        Returns:
+            operations.Comment_options: A Comment_options operation ready to be appended to a transaction.
+
+        Raises:
+            ValueError: If a beneficiary is missing the "account" field, has an account name
+                longer than 16 characters, has an invalid weight (not in 1..HIVE_100_PERCENT),
+                or if the sum of beneficiary weights exceeds HIVE_100_PERCENT.
+        """
+        options = remove_from_dict(
+            options or {},
+            [
+                "max_accepted_payout",
+                "percent_hbd",
+                "allow_votes",
+                "allow_curation_rewards",
+                "extensions",
+            ],
+            keep_keys=True,
+        )
+        # override beneficiaries extension
+        if beneficiaries:
+            # validate schema
+            # or just simply vo.Schema([{'account': str, 'weight': int}])
+
+            weight_sum = 0
+            for b in beneficiaries:
+                if "account" not in b:
+                    raise ValueError("beneficiaries need an account field!")
+                if "weight" not in b:
+                    b["weight"] = HIVE_100_PERCENT
+                if len(b["account"]) > 16:
+                    raise ValueError("beneficiaries error, account name length >16!")
+                if b["weight"] < 1 or b["weight"] > HIVE_100_PERCENT:
+                    raise ValueError("beneficiaries error, 1<=weight<=%s!" % (HIVE_100_PERCENT))
+                weight_sum += b["weight"]
+
+            if weight_sum > HIVE_100_PERCENT:
+                raise ValueError("beneficiaries exceed total weight limit %s" % HIVE_100_PERCENT)
+
+            options["beneficiaries"] = beneficiaries
+
+        default_max_payout = Amount(
+            "1000000.000 %s" % (self.backed_token_symbol), blockchain_instance=self
+        )
+        comment_op = operations.Comment_options(
+            **{
+                "author": author,
+                "permlink": permlink,
+                "max_accepted_payout": options.get("max_accepted_payout", default_max_payout),
+                "percent_hbd": int(options.get("percent_hbd", HIVE_100_PERCENT)),
+                "allow_votes": options.get("allow_votes", True),
+                "allow_curation_rewards": options.get("allow_curation_rewards", True),
+                "extensions": options.get("extensions", []),
+                "beneficiaries": options.get("beneficiaries", []),
+                "prefix": self.prefix,
+            }
+        )
+        return comment_op
+
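A self-contained sketch of the beneficiary rules enforced by _build_comment_options_op() above: weights are basis points (HIVE_100_PERCENT assumed to be 10000), account names are at most 16 characters, a missing weight defaults to 100%, and the weights may sum to at most 10000; whatever is not assigned implicitly stays with the author.

    HIVE_100_PERCENT = 10000

    beneficiaries = [
        {"account": "bob", "weight": 1500},     # 15% of author rewards
        {"account": "charlie", "weight": 500},  # 5%
    ]

    weight_sum = 0
    for b in beneficiaries:
        assert "account" in b and len(b["account"]) <= 16
        weight = b.get("weight", HIVE_100_PERCENT)  # missing weight defaults to 100%
        assert 1 <= weight <= HIVE_100_PERCENT
        weight_sum += weight

    assert weight_sum <= HIVE_100_PERCENT  # here 2000, i.e. 20% shared, 80% left to the author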
+    def get_api_methods(self) -> list[str]:
+        """
+        Return the list of all JSON-RPC API methods supported by the connected node.
+
+        Returns:
+            list: Method names (strings) provided by the node's JSON-RPC API.
+        """
+        if self.rpc is None:
+            raise RuntimeError("RPC connection not established")
+        return self.rpc.get_methods()
+
+    def get_apis(self) -> list[str]:
+        """Returns all enabled apis"""
+        api_methods = self.get_api_methods()
+        api_list = []
+        for a in api_methods:
+            api = a.split(".")[0]
+            if api not in api_list:
+                api_list.append(api)
+        return api_list
+
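A small standalone sketch of the namespace reduction done by get_apis() above, using hypothetical but typical Hive method names so that no RPC connection is needed:

    api_methods = [
        "condenser_api.get_accounts",
        "condenser_api.get_content",
        "database_api.get_dynamic_global_properties",
        "block_api.get_block",
    ]

    api_list: list[str] = []
    for name in api_methods:
        api = name.split(".")[0]      # the namespace is everything before the first dot
        if api not in api_list:
            api_list.append(api)      # keep first-seen order, drop duplicates

    print(api_list)  # ['condenser_api', 'database_api', 'block_api']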
+    def _get_asset_symbol(self, asset_id: int) -> str:
+        """
+        Return the asset symbol for a given asset id.
+
+        Asset ids are looked up in self.chain_params["chain_assets"]. Common mappings include
+        0 -> HBD, 1 -> HIVE, 2 -> VESTS.
+
+        Parameters:
+            asset_id (int): Numeric asset id as used in chain_params.
+
+        Returns:
+            str: The asset symbol for the provided id.
+
+        Raises:
+            KeyError: If the asset id is not present in self.chain_params["chain_assets"].
+        """
+        for asset in self.chain_params["chain_assets"]:
+            if asset["id"] == asset_id:
+                return asset["symbol"]
+
+        raise KeyError("asset ID not found in chain assets")
+
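A standalone sketch of the chain_assets lookup in _get_asset_symbol() above; the entries below mirror the typical Hive mainnet mapping (0 -> HBD, 1 -> HIVE, 2 -> VESTS) and are illustrative, since the exact chain_params layout can differ per chain:

    chain_assets = [
        {"symbol": "HBD", "precision": 3, "id": 0},
        {"symbol": "HIVE", "precision": 3, "id": 1},
        {"symbol": "VESTS", "precision": 6, "id": 2},
    ]

    def get_asset_symbol(asset_id: int) -> str:
        for asset in chain_assets:
            if asset["id"] == asset_id:
                return asset["symbol"]
        raise KeyError("asset ID not found in chain assets")

    print(get_asset_symbol(2))  # VESTS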
+    @property
+    def backed_token_symbol(self) -> str:
+        """
+        Return the symbol for the chain's backed asset (HBD-like).
+
+        Attempts to read the asset symbol at asset id 0 (typical HBD). If that key is missing, falls back to asset id 1 (main token) and returns that symbol. Returns a string (e.g., "HBD", "TBD", or the chain's main token symbol). May propagate KeyError if neither asset id is available.
+        """
+        # some networks (e.g. whaleshares) do not have HBD
+        try:
+            symbol = self._get_asset_symbol(0)
+        except KeyError:
+            symbol = self._get_asset_symbol(1)
+        return symbol
+
+    @property
+    def token_symbol(self) -> str:
+        """get the current chain's symbol for HIVE (e.g. "TESTS" on testnet)"""
+        return self._get_asset_symbol(1)
+
+    @property
+    def vest_token_symbol(self) -> str:
+        """get the current chain's symbol for VESTS"""
+        return self._get_asset_symbol(2)
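The three symbol properties above are what higher-level helpers can use to stay chain-agnostic (mainnet vs. testnet vs. other Graphene-style chains). A hypothetical usage sketch, assuming a reachable public node and the nectar.hive.Hive entry point:

    from nectar.hive import Hive

    hive = Hive()
    print(hive.token_symbol)         # "HIVE" on mainnet, "TESTS" on a testnet
    print(hive.backed_token_symbol)  # "HBD" on mainnet, "TBD" on a testnet
    print(hive.vest_token_symbol)    # "VESTS"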