dao-treasury 0.0.10-cp310-cp310-win32.whl → 0.0.70-cp310-cp310-win32.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dao_treasury/.grafana/provisioning/dashboards/breakdowns/Expenses.json +551 -0
- dao_treasury/.grafana/provisioning/dashboards/breakdowns/Revenue.json +551 -0
- dao_treasury/.grafana/provisioning/dashboards/dashboards.yaml +7 -7
- dao_treasury/.grafana/provisioning/dashboards/streams/LlamaPay.json +220 -0
- dao_treasury/.grafana/provisioning/dashboards/summary/Monthly.json +153 -29
- dao_treasury/.grafana/provisioning/dashboards/transactions/Treasury Transactions.json +181 -29
- dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow (Including Unsorted).json +808 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow.json +602 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Current Treasury Assets.json +981 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Historical Treasury Balances.json +2989 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Operating Cashflow.json +478 -0
- dao_treasury/.grafana/provisioning/datasources/datasources.yaml +17 -0
- dao_treasury/ENVIRONMENT_VARIABLES.py +20 -0
- dao_treasury/__init__.py +36 -10
- dao_treasury/_docker.cp310-win32.pyd +0 -0
- dao_treasury/_docker.py +169 -37
- dao_treasury/_nicknames.cp310-win32.pyd +0 -0
- dao_treasury/_nicknames.py +32 -0
- dao_treasury/_wallet.cp310-win32.pyd +0 -0
- dao_treasury/_wallet.py +164 -12
- dao_treasury/constants.cp310-win32.pyd +0 -0
- dao_treasury/constants.py +39 -0
- dao_treasury/db.py +925 -150
- dao_treasury/docker-compose.yaml +6 -5
- dao_treasury/main.py +238 -28
- dao_treasury/sorting/__init__.cp310-win32.pyd +0 -0
- dao_treasury/sorting/__init__.py +219 -115
- dao_treasury/sorting/_matchers.cp310-win32.pyd +0 -0
- dao_treasury/sorting/_matchers.py +261 -17
- dao_treasury/sorting/_rules.cp310-win32.pyd +0 -0
- dao_treasury/sorting/_rules.py +166 -21
- dao_treasury/sorting/factory.cp310-win32.pyd +0 -0
- dao_treasury/sorting/factory.py +245 -37
- dao_treasury/sorting/rule.cp310-win32.pyd +0 -0
- dao_treasury/sorting/rule.py +228 -46
- dao_treasury/sorting/rules/__init__.cp310-win32.pyd +0 -0
- dao_treasury/sorting/rules/__init__.py +1 -0
- dao_treasury/sorting/rules/ignore/__init__.cp310-win32.pyd +0 -0
- dao_treasury/sorting/rules/ignore/__init__.py +1 -0
- dao_treasury/sorting/rules/ignore/llamapay.cp310-win32.pyd +0 -0
- dao_treasury/sorting/rules/ignore/llamapay.py +20 -0
- dao_treasury/streams/__init__.cp310-win32.pyd +0 -0
- dao_treasury/streams/__init__.py +0 -0
- dao_treasury/streams/llamapay.cp310-win32.pyd +0 -0
- dao_treasury/streams/llamapay.py +388 -0
- dao_treasury/treasury.py +118 -25
- dao_treasury/types.cp310-win32.pyd +0 -0
- dao_treasury/types.py +104 -7
- dao_treasury-0.0.70.dist-info/METADATA +134 -0
- dao_treasury-0.0.70.dist-info/RECORD +54 -0
- dao_treasury-0.0.70.dist-info/top_level.txt +2 -0
- dao_treasury__mypyc.cp310-win32.pyd +0 -0
- a743a720bbc4482d330e__mypyc.cp310-win32.pyd +0 -0
- dao_treasury/.grafana/provisioning/datasources/sqlite.yaml +0 -10
- dao_treasury-0.0.10.dist-info/METADATA +0 -36
- dao_treasury-0.0.10.dist-info/RECORD +0 -28
- dao_treasury-0.0.10.dist-info/top_level.txt +0 -2
- {dao_treasury-0.0.10.dist-info → dao_treasury-0.0.70.dist-info}/WHEEL +0 -0
dao_treasury/db.py
CHANGED
@@ -1,27 +1,61 @@
 # mypy: disable-error-code="operator,valid-type,misc"
+"""
+Database models and utilities for DAO treasury reporting.
+
+This module defines Pony ORM entities for:
+
+- Blockchain networks (:class:`Chain`)
+- On-chain addresses (:class:`Address`)
+- ERC-20 tokens and native coin placeholder (:class:`Token`)
+- Hierarchical transaction grouping (:class:`TxGroup`)
+- Treasury transaction records (:class:`TreasuryTx`)
+- Streams and StreamedFunds for streaming payments
+
+It also provides helper functions for inserting ledger entries,
+resolving integrity conflicts, caching transaction receipts,
+and creating SQL views for reporting.
+"""
+
 import typing
 from asyncio import Semaphore
+from collections import OrderedDict
 from decimal import Decimal, InvalidOperation
 from functools import lru_cache
 from logging import getLogger
 from os import path
 from pathlib import Path
-from typing import
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Coroutine,
+    Dict,
+    Final,
+    Literal,
+    Tuple,
+    Union,
+    final,
+    overload,
+)
+from datetime import date, datetime, time, timezone
 
+import eth_portfolio
 from a_sync import AsyncThreadPoolExecutor
 from brownie import chain
 from brownie.convert.datatypes import HexString
-from brownie.
+from brownie.exceptions import EventLookupError
+from brownie.network.event import EventDict, _EventItem
 from brownie.network.transaction import TransactionReceipt
-from eth_typing import ChecksumAddress, HexAddress
 from eth_portfolio.structs import (
     InternalTransfer,
     LedgerEntry,
     TokenTransfer,
     Transaction,
 )
+from eth_retry import auto_retry
+from eth_typing import ChecksumAddress, HexAddress, HexStr
 from pony.orm import (
     Database,
+    InterfaceError,
     Optional,
     PrimaryKey,
     Required,
@@ -31,22 +65,32 @@ from pony.orm import (
     composite_key,
     composite_index,
     db_session,
+    select,
 )
 from y import EEE_ADDRESS, Contract, Network, convert, get_block_timestamp_async
-from y.
+from y._db.decorators import retry_locked
 from y.contracts import _get_code
 from y.exceptions import ContractNotVerified
 
+from dao_treasury.constants import CHAINID
 from dao_treasury.types import TxGroupDbid, TxGroupName
 
 
+EventItem = _EventItem[_EventItem[OrderedDict[str, Any]]]
+
+
 SQLITE_DIR = Path(path.expanduser("~")) / ".dao-treasury"
+"""Path to the directory in the user's home where the DAO treasury SQLite database is stored."""
+
 SQLITE_DIR.mkdir(parents=True, exist_ok=True)
 
 
 _INSERT_THREAD = AsyncThreadPoolExecutor(1)
+_SORT_THREAD = AsyncThreadPoolExecutor(1)
+_EVENTS_THREADS = AsyncThreadPoolExecutor(16)
 _SORT_SEMAPHORE = Semaphore(50)
 
+_UTC = timezone.utc
 
 db = Database()
 
@@ -55,7 +99,14 @@ logger = getLogger("dao_treasury.db")
 
 @final
 class BadToken(ValueError):
-
+    """Raised when a token contract returns invalid metadata.
+
+    This exception is thrown if the token name or symbol is empty
+    or cannot be decoded.
+
+    Examples:
+        >>> raise BadToken("symbol for 0x0 is ''")
+    """
 
 
 # makes type checking work, see below for info:
@@ -65,35 +116,70 @@ DbEntity = db.Entity
 
 @final
 class Chain(DbEntity):
+    """Pony ORM entity representing a blockchain network.
+
+    Stores human-readable network names and numeric chain IDs for reporting.
+
+    Examples:
+        >>> Chain.get_dbid(1)  # Ethereum Mainnet
+        1
+
+    See Also:
+        :meth:`get_or_insert`
+    """
+
     _table_ = "chains"
+
     chain_dbid = PrimaryKey(int, auto=True)
+    """Auto-incremented primary key for the chains table."""
 
     chain_name = Required(str, unique=True)
+    """Name of the blockchain network, e.g., 'Mainnet', 'Polygon'."""
+
     chainid = Required(int, unique=True)
+    """Numeric chain ID matching the connected RPC via :data:`~y.constants.CHAINID`."""
 
-
-
-    tokens: Set["Token"]
-    treasury_txs: Set["TreasuryTx"]
-
-    addresses = Set("Address", reverse="chain")
-    tokens = Set("Token", reverse="chain")
-    treasury_txs = Set("TreasuryTx")
-    # partners_txs = Set("PartnerHarvestEvent")
+    addresses = Set("Address", reverse="chain", lazy=True)
+    """Relationship to address records on this chain."""
 
-
+    tokens = Set("Token", reverse="chain", lazy=True)
+    """Relationship to token records on this chain."""
+
+    treasury_txs = Set("TreasuryTx", lazy=True)
+    """Relationship to treasury transactions on this chain."""
+
+    @staticmethod
     @lru_cache(maxsize=None)
-    def get_dbid(
+    def get_dbid(chainid: int = CHAINID) -> int:
+        """Get or create the record for `chainid` and return its database ID.
+
+        Args:
+            chainid: Numeric chain identifier (default uses active RPC via :data:`~y.constants.CHAINID`).
+
+        Examples:
+            >>> Chain.get_dbid(1)
+            1
+        """
         with db_session:
-            return
+            return Chain.get_or_insert(chainid).chain_dbid  # type: ignore [no-any-return]
 
-    @
-    def get_or_insert(
-
+    @staticmethod
+    def get_or_insert(chainid: int) -> "Chain":
+        """Insert a new chain record if it does not exist.
+
+        Args:
+            chainid: Numeric chain identifier.
+
+        Examples:
+            >>> chain = Chain.get_or_insert(1)
+            >>> chain.chain_name
+            'Mainnet'
+        """
+        entity = Chain.get(chainid=chainid) or Chain(
             chain_name=Network.name(chainid),
             chainid=chainid,
             # TODO: either remove this or implement it when the dash pieces are together
-            #victoria_metrics_label=Network.label(chainid),
+            # victoria_metrics_label=Network.label(chainid),
         )
         commit()
         return entity
@@ -101,13 +187,35 @@ class Chain(DbEntity):
 
 @final
 class Address(DbEntity):
+    """Pony ORM entity representing an on-chain address.
+
+    Records both contract and externally owned addresses for tracing funds.
+
+    Examples:
+        >>> Address.get_dbid("0x0000000000000000000000000000000000000000")
+        1
+
+    See Also:
+        :meth:`get_or_insert`
+    """
+
     _table_ = "addresses"
+
     address_id = PrimaryKey(int, auto=True)
-
+    """Auto-incremented primary key for the addresses table."""
+
+    chain = Required(Chain, reverse="addresses", lazy=True)
+    """Reference to the chain on which this address resides."""
 
     address = Required(str, index=True)
+    """Checksum string of the on-chain address."""
+
     nickname = Optional(str)
-
+    """Optional human-readable label (e.g., contract name or token name)."""
+
+    is_contract = Required(bool, index=True, lazy=True)
+    """Flag indicating whether the address is a smart contract."""
+
     composite_key(address, chain)
     composite_index(is_contract, chain)
 
@@ -115,73 +223,145 @@ class Address(DbEntity):
         token: Optional["Token"]
         treasury_tx_from: Set["TreasuryTx"]
         treasury_tx_to: Set["TreasuryTx"]
-
-    token = Optional("Token", index=True)
-
-
-
-
-
-
-
-
-
-
-
+
+    token = Optional("Token", index=True, lazy=True)
+    """Optional back-reference to a Token if this address is one."""
+    # partners_tx = Set('PartnerHarvestEvent', reverse='wrapper', lazy=True)
+
+    treasury_tx_from = Set("TreasuryTx", reverse="from_address", lazy=True)
+    """Inverse relation for transactions sent from this address."""
+
+    treasury_tx_to = Set("TreasuryTx", reverse="to_address", lazy=True)
+    """Inverse relation for transactions sent to this address."""
+
+    streams_from = Set("Stream", reverse="from_address", lazy=True)
+    streams_to = Set("Stream", reverse="to_address", lazy=True)
+    streams = Set("Stream", reverse="contract", lazy=True)
+    # vesting_escrows = Set("VestingEscrow", reverse="address", lazy=True)
+    # vests_received = Set("VestingEscrow", reverse="recipient", lazy=True)
+    # vests_funded = Set("VestingEscrow", reverse="funder", lazy=True)
+
+    def __eq__(self, other: Union["Address", ChecksumAddress, "Token"]) -> bool:  # type: ignore [override]
         if isinstance(other, str):
             return CHAINID == self.chain.chainid and other == self.address
+        elif isinstance(other, Token):
+            return self.address_id == other.address.address_id
         return super().__eq__(other)
-
+
     __hash__ = DbEntity.__hash__
 
-    @
+    @property
+    def contract(self) -> Contract:
+        return Contract(self.address)
+
+    @property
+    def contract_coro(self) -> Coroutine[Any, Any, Contract]:
+        return Contract.coroutine(self.address)
+
+    @staticmethod
     @lru_cache(maxsize=None)
-    def get_dbid(
+    def get_dbid(address: HexAddress) -> int:
+        """Get the DB ID for an address, inserting if necessary.
+
+        Args:
+            address: Hex string of the address (any case, any prefix).
+
+        Examples:
+            >>> Address.get_dbid("0x0000000000000000000000000000000000000000")
+            1
+        """
         with db_session:
-            return
+            return Address.get_or_insert(address).address_id  # type: ignore [no-any-return]
 
-    @
-    def get_or_insert(
+    @staticmethod
+    def get_or_insert(address: HexAddress) -> "Address":
+        """Insert or fetch an :class:`~dao_treasury.db.Address` for `address`.
+
+        If the address has on-chain code, attempts to label it using
+        the verified contract name or fallback label.
+
+        Args:
+            address: Hex address string.
+
+        Examples:
+            >>> addr = Address.get_or_insert("0x0000000000000000000000000000000000000000")
+            >>> addr.is_contract
+            False
+        """
         checksum_address = convert.to_address(address)
         chain_dbid = Chain.get_dbid()
-
+
         if entity := Address.get(chain=chain_dbid, address=checksum_address):
             return entity  # type: ignore [no-any-return]
-
-        if _get_code(
+
+        if _get_code(checksum_address, None).hex().removeprefix("0x"):
             try:
-                nickname =
-
-
+                nickname = (
+                    f"Contract: {Contract(checksum_address)._build['contractName']}"
+                )
+            except ContractNotVerified:
+                nickname = f"Non-Verified Contract: {checksum_address}"
 
             entity = Address(
-                chain=chain_dbid,
+                chain=chain_dbid,
                 address=checksum_address,
                 nickname=nickname,
-                is_contract=
+                is_contract=True,
             )
 
         else:
 
             entity = Address(
-                chain=chain_dbid,
+                chain=chain_dbid,
                 address=checksum_address,
                 is_contract=False,
             )
 
         commit()
-
         return entity  # type: ignore [no-any-return]
 
+    @staticmethod
+    def set_nickname(address: HexAddress, nickname: str) -> None:
+        if not nickname:
+            raise ValueError("You must provide an actual string")
+        with db_session:
+            entity = Address.get_or_insert(address)
+            if entity.nickname == nickname:
+                return
+            if entity.nickname:
+                old = entity.nickname
+                entity.nickname = nickname
+                commit()
+                logger.info(
+                    "%s nickname changed from %s to %s", entity.address, old, nickname
+                )
+            else:
+                entity.nickname = nickname
+                commit()
+                logger.info("%s nickname set to %s", entity.address, nickname)
+
+    @staticmethod
+    def set_nicknames(nicknames: Dict[HexAddress, str]) -> None:
+        with db_session:
+            for address, nickname in nicknames.items():
+                Address.set_nickname(address, nickname)
+
 
 UNI_V3_POS: Final = {
     Network.Mainnet: "0xC36442b4a4522E871399CD717aBDD847Ab11FE88",
-}.get(CHAINID,
+}.get(CHAINID, "not on this chain")
 
 
 def _hex_to_string(h: HexString) -> str:
-
+    """Decode a padded HexString to UTF-8, trimming trailing zero bytes.
+
+    Args:
+        h: The HexString instance from an ERC-20 contract.
+
+    Examples:
+        >>> _hex_to_string(HexString(b'0x5465737400', 'bytes32'))
+        'Test'
+    """
     h = h.hex().rstrip("0")
     if len(h) % 2 != 0:
         h += "0"
@@ -190,47 +370,123 @@ def _hex_to_string(h: HexString) -> str:
 
 @final
 class Token(DbEntity):
+    """Pony ORM entity representing an ERC-20 token or native coin placeholder.
+
+    Stores symbol, name, and decimals for value scaling.
+
+    Examples:
+        >>> Token.get_dbid("0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE")
+        1
+        >>> tok = Token.get_or_insert("0x6B175474E89094C44Da98b954EedeAC495271d0F")
+        >>> tok.symbol
+        'DAI'
+
+    See Also:
+        :meth:`scale_value`
+    """
+
     _table_ = "tokens"
+
     token_id = PrimaryKey(int, auto=True)
-
+    """Auto-incremented primary key for the tokens table."""
 
-
-
-
+    chain = Required(Chain, index=True, lazy=True)
+    """Foreign key linking to :class:`~dao_treasury.db.Chain`."""
+
+    symbol = Required(str, index=True, lazy=True)
+    """Short ticker symbol for the token."""
+
+    name = Required(str, lazy=True)
+    """Full human-readable name of the token."""
+
+    decimals = Required(int, lazy=True)
+    """Number of decimals used for value scaling."""
 
     if TYPE_CHECKING:
         treasury_tx: Set["TreasuryTx"]
-
-    treasury_tx = Set("TreasuryTx", reverse="token")
-
+
+    treasury_tx = Set("TreasuryTx", reverse="token", lazy=True)
+    """Inverse relation for treasury transactions involving this token."""
+    # partner_harvest_event = Set('PartnerHarvestEvent', reverse="vault", lazy=True)
+
     address = Required(Address, column="address_id")
-
-
+    """Foreign key to the address record for this token contract."""
+
+    streams = Set("Stream", reverse="token", lazy=True)
+    # vesting_escrows = Set("VestingEscrow", reverse="token", lazy=True)
+
+    def __eq__(self, other: Union["Token", Address, ChecksumAddress]) -> bool:  # type: ignore [override]
+        if isinstance(other, str):
+            return self.address == other
+        elif isinstance(other, Address):
+            return self.address.address_id == other.address_id
+        return super().__eq__(other)
 
-    def __eq__(self, other: Union["Token", ChecksumAddress]) -> bool:  # type: ignore [override]
-        return self.address == other if isinstance(other, str) else super().__eq__(other)
-
     __hash__ = DbEntity.__hash__
 
+    @property
+    def contract(self) -> Contract:
+        return Contract(self.address.address)
+
+    @property
+    def contract_coro(self) -> Coroutine[Any, Any, Contract]:
+        return Contract.coroutine(self.address.address)
+
     @property
     def scale(self) -> int:
+        """Base for division according to `decimals`, e.g., `10**decimals`.
+
+        Examples:
+            >>> t = Token.get_or_insert("0x...")
+            >>> t.scale
+            1000000000000000000
+        """
         return 10**self.decimals  # type: ignore [no-any-return]
-
+
     def scale_value(self, value: int) -> Decimal:
+        """Convert an integer token amount into a Decimal accounting for `decimals`.
+
+        Args:
+            value: Raw integer on-chain amount.
+
+        Examples:
+            >>> t = Token.get_or_insert("0x...")
+            >>> t.scale_value(1500000000000000000)
+            Decimal('1.5')
+        """
         return Decimal(value) / self.scale
-
-    @
+
+    @staticmethod
     @lru_cache(maxsize=None)
-    def get_dbid(
+    def get_dbid(address: HexAddress) -> int:
+        """Get or insert a `Token` record and return its database ID.
+
+        Args:
+            address: Token contract address or native coin placeholder.
+
+        Examples:
+            >>> Token.get_dbid("0x6B175474E89094C44Da98b954EedeAC495271d0F")
+            2
+        """
         with db_session:
-            return
+            return Token.get_or_insert(address).token_id  # type: ignore [no-any-return]
 
-    @
-    def get_or_insert(
+    @staticmethod
+    def get_or_insert(address: HexAddress) -> "Token":
+        """Insert or fetch a token record from the chain, resolving metadata on-chain.
+
+        Args:
+            address: ERC-20 contract address or native coin placeholder.
+
+        Examples:
+            >>> Token.get_or_insert("0x6B175474E89094C44Da98b954EedeAC495271d0F")
+            <Token ...>
+        """
         address_entity = Address.get_or_insert(address)
         if token := Token.get(address=address_entity):
             return token  # type: ignore [no-any-return]
-
+
+        address = address_entity.address
         if address == EEE_ADDRESS:
             name, symbol = {Network.Mainnet: ("Ethereum", "ETH")}[chain.id]
             decimals = 18
@@ -249,28 +505,30 @@ class Token(DbEntity):
                 decimals = contract.decimals()
             except AttributeError:
                 decimals = 0
-
+
         # MKR contract returns name and symbol as bytes32 which is converted to a brownie HexString
         # try to decode it
         if isinstance(name, HexString):
             name = _hex_to_string(name)
         if isinstance(symbol, HexString):
             symbol = _hex_to_string(symbol)
-
+
         if not name:
             raise BadToken(f"name for {address} is {name}")
-
+
         if not symbol:
             raise BadToken(f"symbol for {address} is {symbol}")
-
+
         if address == UNI_V3_POS or decimals is None:
             decimals = 0
 
         # update address nickname for token
-        if address_entity.nickname is None or address_entity.nickname.startswith(
+        if address_entity.nickname is None or address_entity.nickname.startswith(
+            "Contract: "
+        ):
             # Don't overwrite any intentionally set nicknames, if applicable
             address_entity.nickname = f"Token: {name}"
-
+
         token = Token(
             chain=Chain.get_dbid(),
             address=address_entity.address_id,
@@ -278,138 +536,327 @@ class Token(DbEntity):
             name=name,
             decimals=decimals,
         )
-
         commit()
-
         return token  # type: ignore [no-any-return]
 
 
 class TxGroup(DbEntity):
-
+    """Pony ORM entity for hierarchical transaction groups.
+
+    Used to categorize treasury transactions into nested buckets.
+
+    Examples:
+        >>> gid = TxGroup.get_dbid("Revenue")
+        >>> group = TxGroup.get_or_insert("Revenue", None)
+        >>> group.full_string
+        'Revenue'
+    """
+
+    _table_ = "txgroups"
+
     txgroup_id = PrimaryKey(int, auto=True)
+    """Auto-incremented primary key for transaction groups."""
+
+    name = Required(str)
+    """Name of the grouping category, e.g., 'Revenue', 'Expenses'."""
 
-
+    treasury_tx = Set("TreasuryTx", reverse="txgroup", lazy=True)
+    """Inverse relation for treasury transactions assigned to this group."""
 
-    treasury_tx = Set('TreasuryTx', reverse="txgroup")
     parent_txgroup = Optional("TxGroup", reverse="child_txgroups")
-
-
-
-
+    """Optional reference to a parent group for nesting."""
+
+    composite_key(name, parent_txgroup)
+
+    child_txgroups = Set("TxGroup", reverse="parent_txgroup", lazy=True)
+    """Set of nested child groups."""
+
+    streams = Set("Stream", reverse="txgroup", lazy=True)
+
+    # TODO: implement this
+    # vesting_escrows = Set("VestingEscrow", reverse="txgroup", lazy=True)
 
     @property
-    def
-
-
-
-
+    def fullname(self) -> str:
+        """Return the colon-delimited path from root to this group.
+
+        Examples:
+            >>> root = TxGroup.get_or_insert("Revenue", None)
+            >>> child = TxGroup.get_or_insert("Interest", root)
+            >>> child.full_string
+            'Revenue:Interest'
+        """
         t = self
         retval = t.name
-        while
-            if t.parent_txgroup is None:
-                return retval  # type: ignore [no-any-return]
+        while t.parent_txgroup:
             t = t.parent_txgroup
             retval = f"{t.name}:{retval}"
-
-
+        return retval
+
+    @property
+    def top_txgroup(self) -> "TxGroup":
+        """Get the top-level ancestor in this group's hierarchy."""
+        return self.parent_txgroup.top_txgroup if self.parent_txgroup else self
+
+    @staticmethod
     @lru_cache(maxsize=None)
-    def get_dbid(
+    def get_dbid(
+        name: TxGroupName, parent: typing.Optional["TxGroup"] = None
+    ) -> TxGroupDbid:
+        """Get or insert a transaction group and return its database ID.
+
+        Args:
+            name: Category name.
+            parent: Optional parent :class:`~dao_treasury.db.TxGroup`.
+
+        Examples:
+            >>> TxGroup.get_dbid("Expenses", None)
+            3
+        """
         with db_session:
-            return TxGroupDbid(
-
-    @
-
+            return TxGroupDbid(TxGroup.get_or_insert(name, parent).txgroup_id)
+
+    @staticmethod
+    @lru_cache(maxsize=None)
+    def get_fullname(dbid: TxGroupDbid) -> TxGroupName:
+        with db_session:
+            if txgroup := TxGroup.get(txgroup_id=dbid):
+                return txgroup.fullname
+            raise ValueError(f"TxGroup[{dbid}] not found")
+
+    @staticmethod
+    def get_or_insert(
+        name: TxGroupName, parent: typing.Optional["TxGroup"]
+    ) -> "TxGroup":
+        """Insert or fetch a transaction group.
+
+        Args:
+            name: Category name.
+            parent: Optional parent group.
+
+        Examples:
+            >>> TxGroup.get_or_insert("Expenses", None).name
+            'Expenses'
+        """
         if txgroup := TxGroup.get(name=name, parent_txgroup=parent):
             return txgroup  # type: ignore [no-any-return]
         txgroup = TxGroup(name=name, parent_txgroup=parent)
         try:
             commit()
         except TransactionIntegrityError as e:
+            if txgroup := TxGroup.get(name=name, parent_txgroup=parent):
+                return txgroup  # type: ignore [no-any-return]
             raise Exception(e, name, parent) from e
         return txgroup  # type: ignore [no-any-return]
 
 
-@lru_cache(
+@lru_cache(500)
 def get_transaction(txhash: str) -> TransactionReceipt:
+    """Fetch and cache a transaction receipt from the connected chain.
+
+    Wraps :meth:`brownie.network.chain.Chain.get_transaction`.
+
+    Args:
+        txhash: Hex string of the transaction hash.
+
+    Examples:
+        >>> get_transaction("0xabcde...")
+        <Transaction '0xabcde...'>
+    """
     return chain.get_transaction(txhash)
 
 
 class TreasuryTx(DbEntity):
+    """Pony ORM entity for on-chain treasury transactions.
+
+    Represents individual token or native transfers with pricing, grouping, and gas data.
+
+    Examples:
+        >>> # After inserting, fetch sorted records
+        >>> with db_session:
+        ...     txs = TreasuryTx.select(lambda tx: tx.txgroup == TxGroup.get_dbid("Revenue"))
+        ...     for tx in txs:
+        ...         print(tx.hash, tx.value_usd)
+    """
+
     _table_ = "treasury_txs"
+
     treasury_tx_id = PrimaryKey(int, auto=True)
+    """Auto-incremented primary key for treasury transactions."""
+
     chain = Required(Chain, index=True)
+    """Foreign key to the network where the transaction occurred."""
 
     timestamp = Required(int, index=True)
+    """Block timestamp as Unix epoch seconds."""
+
     block = Required(int, index=True)
+    """Block number of the transaction."""
+
     hash = Required(str, index=True)
+    """Hex string of the transaction hash."""
+
     log_index = Optional(int)
+    """Log index within the block (None for native transfers)."""
+
     composite_key(hash, log_index)
+
     token = Required(Token, reverse="treasury_tx", column="token_id", index=True)
+    """Foreign key to the token record used in the transfer."""
+
     from_address = Optional(
         Address, reverse="treasury_tx_from", column="from", index=True
     )
+    """Foreign key to sender address record."""
+
     to_address = Optional(Address, reverse="treasury_tx_to", column="to", index=True)
+    """Foreign key to recipient address record."""
+
     amount = Required(Decimal, 38, 18)
+    """On-chain transfer amount as a Decimal with fixed precision."""
+
     price = Optional(Decimal, 38, 18)
+    """Token price at the time of transfer (if available)."""
+
     value_usd = Optional(Decimal, 38, 18)
+    """USD value of the transfer, computed as `amount * price`."""
+
     gas_used = Optional(Decimal, 38, 1)
+    """Gas units consumed by this transaction (native transfers only)."""
+
     gas_price = Optional(Decimal, 38, 1)
-
+    """Gas price paid, in native token units (native transfers only)."""
+
+    txgroup = Required(
+        "TxGroup", reverse="treasury_tx", column="txgroup_id", index=True
+    )
+    """Foreign key to the categorization group."""
+
     composite_index(chain, txgroup)
 
     @property
     def to_nickname(self) -> typing.Optional[str]:
+        """Human-readable label for the recipient address, if any."""
         if to_address := self.to_address:
             return to_address.nickname or to_address.address
         return None
 
     @property
     def from_nickname(self) -> str:
-
+        """Human-readable label for the sender address."""
+        return self.from_address.nickname or self.from_address.address  # type: ignore [union-attr]
+
+    @property
+    def token_address(self) -> ChecksumAddress:
+        return self.token.address.address
 
     @property
     def symbol(self) -> str:
+        """Ticker symbol for the transferred token."""
        return self.token.symbol  # type: ignore [no-any-return]
 
-    # Helpers
     @property
-    def
+    def events(self) -> EventDict:
+        """Decoded event logs for this transaction."""
         return self._transaction.events
 
+    async def events_async(self) -> EventDict:
+        """Asynchronously fetch decoded event logs for this transaction."""
+        tx = self._transaction
+        events = tx._events
+        if events is None:
+            events = await _EVENTS_THREADS.run(getattr, tx, "events")
+        return events
+
+    @overload
+    def get_events(
+        self, event_name: str, sync: Literal[False]
+    ) -> Coroutine[Any, Any, EventItem]: ...
+    @overload
+    def get_events(self, event_name: str, sync: bool = True) -> EventItem: ...
+    def get_events(self, event_name: str, sync: bool = True) -> EventItem:
+        if not sync:
+            return _EVENTS_THREADS.run(self.get_events, event_name)
+        try:
+            return self.events[event_name]
+        except EventLookupError:
+            pass
+        except KeyError as e:
+            # This happens sometimes due to a busted abi and hopefully shouldnt impact you
+            if str(e) != "'components'":
+                raise
+        return _EventItem(event_name, None, [], ())
+
     @property
     def _transaction(self) -> TransactionReceipt:
+        """Cached transaction receipt object."""
         return get_transaction(self.hash)
 
     @staticmethod
+    @auto_retry
     async def insert(entry: LedgerEntry) -> None:
+        """Asynchronously insert and sort a ledger entry.
+
+        Converts a :class:`~eth_portfolio.structs.LedgerEntry` into a
+        :class:`~dao_treasury.db.TreasuryTx` record, then applies advanced sorting.
+
+        Args:
+            entry: A ledger entry representing a token or internal transfer.
+
+        Examples:
+            >>> import asyncio, eth_portfolio.structs as s
+            >>> asyncio.run(TreasuryTx.insert(s.TokenTransfer(...)))
+        See Also:
+            :meth:`__insert`
+        """
         timestamp = int(await get_block_timestamp_async(entry.block_number))
         if txid := await _INSERT_THREAD.run(TreasuryTx.__insert, entry, timestamp):
             async with _SORT_SEMAPHORE:
                 from dao_treasury.sorting import sort_advanced
 
-
+                try:
                     await sort_advanced(TreasuryTx[txid])
-
-
-
+                except Exception as e:
+                    e.args = *e.args, entry
+                    raise
+
+    async def _set_txgroup(self, txgroup_dbid: TxGroupDbid) -> None:
+        await _SORT_THREAD.run(
+            TreasuryTx.__set_txgroup, self.treasury_tx_id, txgroup_dbid
+        )
+
+    @staticmethod
+    def __insert(entry: LedgerEntry, ts: int) -> typing.Optional[int]:
+        """Synchronously insert a ledger entry record into the database.
+
+        Handles both :class:`TokenTransfer` and other ledger entry types,
+        populates pricing fields, and resolves grouping via basic sorting.
+
+        Args:
+            entry: Ledger entry to insert.
+            ts: Unix timestamp of the block.
+
+        If a uniqueness conflict arises, delegates to
+        :func:`_validate_integrity_error`. Returns the new record ID
+        if further advanced sorting is required.
+        """
         try:
             with db_session:
                 if isinstance(entry, TokenTransfer):
-
-                        token = Token.get_dbid(entry.token_address)
-                    except (ContractNotVerified, BadToken):
-                        return None
+                    token = Token.get_dbid(entry.token_address)
                     log_index = entry.log_index
-                    # TODO: implement gas
                    gas, gas_price, gas_used = None, None, None
                 else:
                     token = Token.get_dbid(EEE_ADDRESS)
                     log_index = None
-                    # TODO: implement gas
                     gas = entry.gas
-                    gas_used =
-
-
+                    gas_used = (
+                        entry.gas_used if isinstance(entry, InternalTransfer) else None
+                    )
+                    gas_price = (
+                        entry.gas_price if isinstance(entry, Transaction) else None
+                    )
+
                 if to_address := entry.to_address:
                     to_address = Address.get_dbid(to_address)
                 if from_address := entry.from_address:
@@ -438,23 +885,243 @@ class TreasuryTx(DbEntity):
                     gas_price=gas_price,
                     txgroup=txgroup_dbid,
                 )
+                # we must commit here or else dbid below will be `None`.
+                commit()
                 dbid = entity.treasury_tx_id
+        except InterfaceError as e:
+            raise ValueError(
+                e,
+                {
+                    "chain": Chain.get_dbid(CHAINID),
+                    "block": entry.block_number,
+                    "timestamp": ts,
+                    "hash": entry.hash.hex(),
+                    "log_index": log_index,
+                    "from_address": from_address,
+                    "to_address": to_address,
+                    "token": token,
+                    "amount": entry.value,
+                    "price": entry.price,
+                    "value_usd": entry.value_usd,
+                    # TODO: nuke db and add this column
+                    # gas = gas,
+                    "gas_used": gas_used,
+                    "gas_price": gas_price,
+                    "txgroup": txgroup_dbid,
+                },
+            ) from e
         except InvalidOperation as e:
-
+            with db_session:
+                from_address_entity = Address[from_address]
+                to_address_entity = Address[to_address]
+                token_entity = Token[token]
+            logger.error(e)
+            logger.error(
+                {
+                    "chain": Chain.get_dbid(CHAINID),
+                    "block": entry.block_number,
+                    "timestamp": ts,
+                    "hash": entry.hash.hex(),
+                    "log_index": log_index,
+                    "from_address": {
+                        "dbid": from_address,
+                        "address": from_address_entity.address,
+                        "nickname": from_address_entity.nickname,
+                    },
+                    "to_address": {
+                        "dbid": to_address,
+                        "address": to_address_entity.address,
+                        "nickname": to_address_entity.nickname,
+                    },
+                    "token": {
+                        "dbid": token,
+                        "address": token_entity.address.address,
+                        "name": token_entity.name,
+                        "symbol": token_entity.symbol,
+                        "decimals": token_entity.decimals,
+                    },
+                    "amount": entry.value,
+                    "price": entry.price,
+                    "value_usd": entry.value_usd,
+                    # TODO: nuke db and add this column
+                    # gas = gas,
+                    "gas_used": gas_used,
+                    "gas_price": gas_price,
+                    "txgroup": {
+                        "dbid": txgroup_dbid,
+                        "fullname": TxGroup[txgroup_dbid].fullname,
+                    },
+                }
+            )
             return None
         except TransactionIntegrityError as e:
-            #logger.error(e, entry, exc_info=True)
-            # TODO: implement this
             return _validate_integrity_error(entry, log_index)
         except Exception as e:
             e.args = *e.args, entry
             raise
         else:
-            if txgroup_dbid not in (
-
+            if txgroup_dbid not in (
+                must_sort_inbound_txgroup_dbid,
+                must_sort_outbound_txgroup_dbid,
+            ):
+                logger.info(
+                    "Sorted %s to %s", entry, TxGroup.get_fullname(txgroup_dbid)
+                )
                return None
            return dbid  # type: ignore [no-any-return]
 
+    @staticmethod
+    @retry_locked
+    def __set_txgroup(treasury_tx_dbid: int, txgroup_dbid: TxGroupDbid) -> None:
+        with db_session:
+            TreasuryTx[treasury_tx_dbid].txgroup = txgroup_dbid
+            commit()
+
+
+_stream_metadata_cache: Final[Dict[HexStr, Tuple[ChecksumAddress, date]]] = {}
+
+
+class Stream(DbEntity):
+    _table_ = "streams"
+    stream_id = PrimaryKey(str)
+
+    contract = Required("Address", reverse="streams")
+    start_block = Required(int)
+    end_block = Optional(int)
+    token = Required("Token", reverse="streams", index=True)
+    from_address = Required("Address", reverse="streams_from")
+    to_address = Required("Address", reverse="streams_to")
+    reason = Optional(str)
+    amount_per_second = Required(Decimal, 38, 1)
+    status = Required(str, default="Active")
+    txgroup = Optional("TxGroup", reverse="streams")
+
+    streamed_funds = Set("StreamedFunds", lazy=True)
+
+    scale = 10**20
+
+    @property
+    def is_alive(self) -> bool:
+        if self.end_block is None:
+            assert self.status in ["Active", "Paused"]
+            return self.status == "Active"
+        assert self.status == "Stopped"
+        return False
+
+    @property
+    def amount_per_minute(self) -> int:
+        return self.amount_per_second * 60
+
+    @property
+    def amount_per_hour(self) -> int:
+        return self.amount_per_minute * 60
+
+    @property
+    def amount_per_day(self) -> int:
+        return self.amount_per_hour * 24
+
+    @staticmethod
+    def check_closed(stream_id: HexStr) -> bool:
+        with db_session:
+            return any(sf.is_last_day for sf in Stream[stream_id].streamed_funds)
+
+    @staticmethod
+    def _get_start_and_end(stream_dbid: HexStr) -> Tuple[datetime, datetime]:
+        with db_session:
+            stream = Stream[stream_dbid]
+            start_date, end = stream.start_date, datetime.now(_UTC)
+            # convert start to datetime
+            start = datetime.combine(start_date, time(tzinfo=_UTC), tzinfo=_UTC)
+            if stream.end_block:
+                end = datetime.fromtimestamp(chain[stream.end_block].timestamp, tz=_UTC)
+            return start, end
+
+    def stop_stream(self, block: int) -> None:
+        self.end_block = block
+        self.status = "Stopped"
+
+    def pause(self) -> None:
+        self.status = "Paused"
+
+    @staticmethod
+    def _get_token_and_start_date(stream_id: HexStr) -> Tuple[ChecksumAddress, date]:
+        try:
+            return _stream_metadata_cache[stream_id]
+        except KeyError:
+            with db_session:
+                stream = Stream[stream_id]
+                token = stream.token.address.address
+                start_date = stream.start_date
+            _stream_metadata_cache[stream_id] = token, start_date
+            return token, start_date
+
+    @property
+    def stream_contract(self) -> Contract:
+        return Contract(self.contract.address)
+
+    @property
+    def start_date(self) -> date:
+        return datetime.fromtimestamp(chain[self.start_block].timestamp).date()
+
+    async def amount_withdrawable(self, block: int) -> int:
+        return await self.stream_contract.withdrawable.coroutine(
+            self.from_address.address,
+            self.to_address.address,
+            int(self.amount_per_second),
+            block_identifier=block,
+        )
+
+    def print(self) -> None:
+        symbol = self.token.symbol
+        print(f"{symbol} per second: {self.amount_per_second / self.scale}")
+        print(f"{symbol} per day: {self.amount_per_day / self.scale}")
+
+
+class StreamedFunds(DbEntity):
+    """Each object represents one calendar day of tokens streamed for a particular stream."""
+
+    _table_ = "streamed_funds"
+
+    date = Required(date)
+    stream = Required(Stream, reverse="streamed_funds")
+    PrimaryKey(stream, date)
+
+    amount = Required(Decimal, 38, 18)
+    price = Required(Decimal, 38, 18)
+    value_usd = Required(Decimal, 38, 18)
+    seconds_active = Required(int)
+    is_last_day = Required(bool)
+
+    @db_session
+    def get_entity(stream_id: str, date: datetime) -> "StreamedFunds":
+        stream = Stream[stream_id]
+        return StreamedFunds.get(date=date, stream=stream)
+
+    @classmethod
+    @db_session
+    def create_entity(
+        cls,
+        stream_id: str,
+        date: datetime,
+        price: Decimal,
+        seconds_active: int,
+        is_last_day: bool,
+    ) -> "StreamedFunds":
+        stream = Stream[stream_id]
+        amount_streamed_today = round(
+            stream.amount_per_second * seconds_active / stream.scale, 18
+        )
+        entity = StreamedFunds(
+            date=date,
+            stream=stream,
+            amount=amount_streamed_today,
+            price=round(price, 18),
+            value_usd=round(amount_streamed_today * price, 18),
+            seconds_active=seconds_active,
+            is_last_day=is_last_day,
+        )
+        return entity
+
 
 db.bind(
     provider="sqlite",  # TODO: let user choose postgres with server connection params
@@ -465,20 +1132,35 @@ db.bind(
 db.generate_mapping(create_tables=True)
 
 
+def _set_address_nicknames_for_tokens() -> None:
+    """Set address.nickname for addresses belonging to tokens."""
+    for address in select(a for a in Address if a.token and not a.nickname):
+        address.nickname = f"Token: {address.token.name}"
+    db.commit()
+
+
 def create_stream_ledger_view() -> None:
+    """Create or replace the SQL view `stream_ledger` for streamed funds reporting.
+
+    This view joins streamed funds, streams, tokens, addresses, and txgroups
+    into a unified ledger of stream transactions.
+
+    Examples:
+        >>> create_stream_ledger_view()
+    """
+    db.execute("""DROP VIEW IF EXISTS stream_ledger;""")
    db.execute(
        """
-        DROP VIEW IF EXISTS stream_ledger;
        create view stream_ledger as
        SELECT 'Mainnet' as chain_name,
-            cast(
+            cast(strftime('%s', date || ' 00:00:00') as INTEGER) as timestamp,
            NULL as block,
            NULL as hash,
            NULL as log_index,
            symbol as token,
            d.address AS "from",
            d.nickname as from_nickname,
-            e.address
+            e.address AS "to",
            e.nickname as to_nickname,
            amount,
            price,
@@ -497,8 +1179,42 @@ def create_stream_ledger_view() -> None:
     )
 
 
+def create_txgroup_hierarchy_view() -> None:
+    """Create or replace the SQL view `txgroup_hierarchy` for recursive txgroup hierarchy.
+
+    This view exposes txgroup_id, top_category, and parent_txgroup for all txgroups,
+    matching the recursive CTE logic used in dashboards.
+    """
+    db.execute("DROP VIEW IF EXISTS txgroup_hierarchy;")
+    db.execute(
+        """
+        CREATE VIEW txgroup_hierarchy AS
+        WITH RECURSIVE group_hierarchy (txgroup_id, top_category, parent_txgroup) AS (
+            SELECT txgroup_id, name AS top_category, parent_txgroup
+            FROM txgroups
+            WHERE parent_txgroup IS NULL
+            UNION ALL
+            SELECT child.txgroup_id, parent.top_category, child.parent_txgroup
+            FROM txgroups AS child
+            JOIN group_hierarchy AS parent
+            ON child.parent_txgroup = parent.txgroup_id
+        )
+        SELECT * FROM group_hierarchy;
+        """
+    )
+
+
 def create_vesting_ledger_view() -> None:
-
+    """Create or replace the SQL view `vesting_ledger` for vesting escrow reporting.
+
+    This view joins vested funds, vesting escrows, tokens, chains, addresses,
+    and txgroups to produce a vesting ledger.
+
+    Examples:
+        >>> create_vesting_ledger_view()
+    """
+    db.execute(
+        """
        DROP VIEW IF EXISTS vesting_ledger;
        CREATE VIEW vesting_ledger AS
        SELECT d.chain_name,
@@ -525,17 +1241,25 @@ def create_vesting_ledger_view() -> None:
        LEFT JOIN addresses f ON b.recipient = f.address_id
        LEFT JOIN txgroups g ON b.txgroup = g.txgroup_id
        left JOIN txgroups h ON g.parent_txgroup = h.txgroup_id
-    """
+        """
+    )
 
 
 def create_general_ledger_view() -> None:
+    """Create or replace the SQL view `general_ledger` aggregating all treasury transactions.
+
+    Joins chains, tokens, addresses, and txgroups into a single chronological ledger.
+
+    Examples:
+        >>> create_general_ledger_view()
+    """
    db.execute("drop VIEW IF EXISTS general_ledger")
    db.execute(
        """
        create VIEW general_ledger as
        select *
        from (
-            SELECT treasury_tx_id, b.chain_name,
+            SELECT treasury_tx_id, b.chain_name, a.timestamp, a.block, a.hash, a.log_index, c.symbol AS token, d.address AS "from", d.nickname as from_nickname, e.address AS "to", e.nickname as to_nickname, a.amount, a.price, a.value_usd, f.name AS txgroup, g.name AS parent_txgroup, f.txgroup_id
            FROM treasury_txs a
            LEFT JOIN chains b ON a.chain = b.chain_dbid
            LEFT JOIN tokens c ON a.token_id = c.token_id
@@ -543,9 +1267,9 @@ def create_general_ledger_view() -> None:
            LEFT JOIN addresses e ON a."to" = e.address_id
            LEFT JOIN txgroups f ON a.txgroup_id = f.txgroup_id
            LEFT JOIN txgroups g ON f.parent_txgroup = g.txgroup_id
-
-
-
+            UNION
+            SELECT -1, chain_name, timestamp, block, hash, log_index, token, "from", from_nickname, "to", to_nickname, amount, price, value_usd, txgroup, parent_txgroup, txgroup_id
+            FROM stream_ledger
            --UNION
            --SELECT -1, *
            --FROM vesting_ledger
@@ -553,9 +1277,16 @@ def create_general_ledger_view() -> None:
        ORDER BY timestamp
        """
    )
-
+
 
 def create_unsorted_txs_view() -> None:
+    """Create or replace the SQL view `unsorted_txs` for pending categorization.
+
+    Filters `general_ledger` for transactions still in 'Categorization Pending'.
+
+    Examples:
+        >>> create_unsorted_txs_view()
+    """
    db.execute("DROP VIEW IF EXISTS unsorted_txs;")
    db.execute(
        """
@@ -569,6 +1300,13 @@ def create_unsorted_txs_view() -> None:
 
 
 def create_monthly_pnl_view() -> None:
+    """Create or replace the SQL view `monthly_pnl` summarizing monthly profit and loss.
+
+    Aggregates categorized transactions by month and top-level category.
+
+    Examples:
+        >>> create_monthly_pnl_view()
+    """
    db.execute("DROP VIEW IF EXISTS monthly_pnl;")
    sql = """
    CREATE VIEW monthly_pnl AS
@@ -607,24 +1345,36 @@ def create_monthly_pnl_view() -> None:
 
 
 with db_session:
-
-
+    create_stream_ledger_view()
+    create_txgroup_hierarchy_view()
+    # create_vesting_ledger_view()
    create_general_ledger_view()
    create_unsorted_txs_view()
-    #create_monthly_pnl_view()
+    # create_monthly_pnl_view()
 
 must_sort_inbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Inbound)")
 must_sort_outbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Outbound)")
 
 
 @db_session
-def _validate_integrity_error(
-
+def _validate_integrity_error(
+    entry: LedgerEntry, log_index: int
+) -> typing.Optional[int]:
+    """Validate that an existing TreasuryTx matches an attempted insert on conflict.
+
+    Raises AssertionError if any field deviates from the existing record. Used
+    to resolve :exc:`pony.orm.TransactionIntegrityError`.
+
+    Args:
+        entry: The ledger entry that triggered the conflict.
+        log_index: The log index within the transaction.
+
+    Examples:
+        >>> _validate_integrity_error(entry, 0)
+    """
    txhash = entry.hash.hex()
    chain_dbid = Chain.get_dbid()
-    existing_object = TreasuryTx.get(
-        hash=txhash, log_index=log_index, chain=chain_dbid
-    )
+    existing_object = TreasuryTx.get(hash=txhash, log_index=log_index, chain=chain_dbid)
    if existing_object is None:
        existing_objects = list(
            TreasuryTx.select(
@@ -634,7 +1384,7 @@ def _validate_integrity_error(entry: LedgerEntry, log_index: int) -> typing.Opti
            )
        )
        raise ValueError(
-            f
+            f"unable to `.get` due to multiple entries: {existing_objects}"
        )
    if entry.to_address:
        assert entry.to_address == existing_object.to_address.address, (
@@ -656,7 +1406,10 @@ def _validate_integrity_error(entry: LedgerEntry, log_index: int) -> typing.Opti
            existing_object.amount,
        )
    except AssertionError:
-        logger.
+        logger.debug(
+            "slight rounding error in value for TreasuryTx[%s] due to sqlite decimal handling",
+            existing_object.treasury_tx_id,
+        )
    assert entry.block_number == existing_object.block, (
        entry.block_number,
        existing_object.block,
@@ -671,9 +1424,31 @@ def _validate_integrity_error(entry: LedgerEntry, log_index: int) -> typing.Opti
    # NOTE All good!
    return (
        existing_object.treasury_tx_id
-        if existing_object.txgroup.txgroup_id
-
+        if existing_object.txgroup.txgroup_id
+        in (
+            must_sort_inbound_txgroup_dbid,
            must_sort_outbound_txgroup_dbid,
        )
        else None
    )
+
+
+def _drop_shitcoin_txs() -> None:
+    """
+    Purge any shitcoin txs from the db.
+
+    These should not be frequent, and only occur if a user populated the db before a shitcoin was added to the SHITCOINS mapping.
+    """
+    shitcoins = eth_portfolio.SHITCOINS[CHAINID]
+    with db_session:
+        shitcoin_txs = select(
+            tx for tx in TreasuryTx if tx.token.address.address in shitcoins
+        )
+        if count := shitcoin_txs.count():
+            logger.info(f"Purging {count} shitcoin txs from the database...")
+            for tx in shitcoin_txs:
+                tx.delete()
+            logger.info("Shitcoin tx purge complete.")
+
+
+_drop_shitcoin_txs()