dao-treasury 0.0.17__cp312-cp312-win32.whl → 0.0.61__cp312-cp312-win32.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dao-treasury might be problematic.
- dao_treasury/.grafana/provisioning/dashboards/breakdowns/Expenses.json +526 -0
- dao_treasury/.grafana/provisioning/dashboards/breakdowns/Revenue.json +526 -0
- dao_treasury/.grafana/provisioning/dashboards/dashboards.yaml +76 -2
- dao_treasury/.grafana/provisioning/dashboards/streams/LlamaPay.json +225 -0
- dao_treasury/.grafana/provisioning/dashboards/summary/Monthly.json +13 -17
- dao_treasury/.grafana/provisioning/dashboards/transactions/Treasury Transactions.json +167 -19
- dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow (Including Unsorted).json +876 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow.json +645 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Current Treasury Assets.json +593 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Historical Treasury Balances.json +2999 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Operating Cashflow.json +513 -0
- dao_treasury/.grafana/provisioning/datasources/datasources.yaml +17 -0
- dao_treasury/ENVIRONMENT_VARIABLES.py +20 -0
- dao_treasury/__init__.py +24 -0
- dao_treasury/_docker.cp312-win32.pyd +0 -0
- dao_treasury/_docker.py +48 -23
- dao_treasury/_nicknames.cp312-win32.pyd +0 -0
- dao_treasury/_nicknames.py +32 -0
- dao_treasury/_wallet.cp312-win32.pyd +0 -0
- dao_treasury/_wallet.py +162 -10
- dao_treasury/constants.cp312-win32.pyd +0 -0
- dao_treasury/constants.py +39 -0
- dao_treasury/db.py +429 -57
- dao_treasury/docker-compose.yaml +6 -5
- dao_treasury/main.py +102 -13
- dao_treasury/sorting/__init__.cp312-win32.pyd +0 -0
- dao_treasury/sorting/__init__.py +181 -105
- dao_treasury/sorting/_matchers.cp312-win32.pyd +0 -0
- dao_treasury/sorting/_rules.cp312-win32.pyd +0 -0
- dao_treasury/sorting/_rules.py +1 -3
- dao_treasury/sorting/factory.cp312-win32.pyd +0 -0
- dao_treasury/sorting/factory.py +2 -6
- dao_treasury/sorting/rule.cp312-win32.pyd +0 -0
- dao_treasury/sorting/rule.py +16 -13
- dao_treasury/sorting/rules/__init__.cp312-win32.pyd +0 -0
- dao_treasury/sorting/rules/__init__.py +1 -0
- dao_treasury/sorting/rules/ignore/__init__.cp312-win32.pyd +0 -0
- dao_treasury/sorting/rules/ignore/__init__.py +1 -0
- dao_treasury/sorting/rules/ignore/llamapay.cp312-win32.pyd +0 -0
- dao_treasury/sorting/rules/ignore/llamapay.py +20 -0
- dao_treasury/streams/__init__.cp312-win32.pyd +0 -0
- dao_treasury/streams/__init__.py +0 -0
- dao_treasury/streams/llamapay.cp312-win32.pyd +0 -0
- dao_treasury/streams/llamapay.py +388 -0
- dao_treasury/treasury.py +75 -28
- dao_treasury/types.cp312-win32.pyd +0 -0
- dao_treasury-0.0.61.dist-info/METADATA +120 -0
- dao_treasury-0.0.61.dist-info/RECORD +54 -0
- dao_treasury-0.0.61.dist-info/top_level.txt +2 -0
- dao_treasury__mypyc.cp312-win32.pyd +0 -0
- 52b51d40e96d4333695d__mypyc.cp312-win32.pyd +0 -0
- dao_treasury/.grafana/provisioning/datasources/sqlite.yaml +0 -10
- dao_treasury-0.0.17.dist-info/METADATA +0 -36
- dao_treasury-0.0.17.dist-info/RECORD +0 -30
- dao_treasury-0.0.17.dist-info/top_level.txt +0 -2
- {dao_treasury-0.0.17.dist-info → dao_treasury-0.0.61.dist-info}/WHEEL +0 -0
dao_treasury/sorting/rule.py
CHANGED
@@ -31,6 +31,7 @@ See Also:
 
 from collections import defaultdict
 from dataclasses import dataclass
+from logging import getLogger
 from typing import (
     TYPE_CHECKING,
     DefaultDict,
@@ -53,6 +54,9 @@ if TYPE_CHECKING:
     from dao_treasury.db import TreasuryTx
 
 
+logger: Final = getLogger(__name__)
+_log_debug: Final = logger.debug
+
 SORT_RULES: DefaultDict[Type[SortRule], List[SortRule]] = defaultdict(list)
 """Mapping from sort rule classes to lists of instantiated rules, in creation order per class.
 
@@ -126,8 +130,6 @@ class _SortRule:
     func: Optional[SortFunction] = None
     """Custom matching function that takes a `TreasuryTx` and returns a bool or an awaitable that returns a bool."""
 
-    # __instances__: ClassVar[List[Self]] = []
-
     def __post_init__(self) -> None:
         """Validate inputs, checksum addresses, and register the rule.
 
@@ -214,6 +216,7 @@ class _SortRule:
             getattr(tx, matcher) == getattr(self, matcher) for matcher in matchers
         )
 
+        _log_debug("checking %s for %s", tx, self.func)
         match = self.func(tx)  # type: ignore [misc]
         return match if isinstance(match, bool) else await match
 
@@ -229,8 +232,8 @@ class _InboundSortRule(_SortRule):
     async def match(self, tx: "TreasuryTx") -> bool:
         return (
             tx.to_address is not None
-            and TreasuryWallet.
-            and await super().match(tx)
+            and TreasuryWallet.check_membership(tx.to_address.address, tx.block)
+            and await super(_InboundSortRule, self).match(tx)
         )
 
 
@@ -243,9 +246,9 @@ class _OutboundSortRule(_SortRule):
     """
 
     async def match(self, tx: "TreasuryTx") -> bool:
-        return TreasuryWallet.
-            tx.from_address.address
-        )
+        return TreasuryWallet.check_membership(
+            tx.from_address.address, tx.block
+        ) and await super(_OutboundSortRule, self).match(tx)
 
 
 @mypyc_attr(native_class=False)
@@ -262,7 +265,7 @@ class RevenueSortRule(_InboundSortRule):
     def __post_init__(self) -> None:
         """Prepends `self.txgroup` with 'Revenue:'."""
         object.__setattr__(self, "txgroup", f"Revenue:{self.txgroup}")
-        super().__post_init__()
+        super(RevenueSortRule, self).__post_init__()
 
 
 @mypyc_attr(native_class=False)
@@ -275,7 +278,7 @@ class CostOfRevenueSortRule(_OutboundSortRule):
     def __post_init__(self) -> None:
         """Prepends `self.txgroup` with 'Cost of Revenue:'."""
         object.__setattr__(self, "txgroup", f"Cost of Revenue:{self.txgroup}")
-        super().__post_init__()
+        super(CostOfRevenueSortRule, self).__post_init__()
 
 
 @mypyc_attr(native_class=False)
@@ -288,7 +291,7 @@ class ExpenseSortRule(_OutboundSortRule):
     def __post_init__(self) -> None:
         """Prepends `self.txgroup` with 'Expenses:'."""
         object.__setattr__(self, "txgroup", f"Expenses:{self.txgroup}")
-        super().__post_init__()
+        super(ExpenseSortRule, self).__post_init__()
 
 
 @mypyc_attr(native_class=False)
@@ -301,7 +304,7 @@ class OtherIncomeSortRule(_InboundSortRule):
     def __post_init__(self) -> None:
         """Prepends `self.txgroup` with 'Other Income:'."""
         object.__setattr__(self, "txgroup", f"Other Income:{self.txgroup}")
-        super().__post_init__()
+        super(OtherIncomeSortRule, self).__post_init__()
 
 
 @mypyc_attr(native_class=False)
@@ -314,7 +317,7 @@ class OtherExpenseSortRule(_OutboundSortRule):
     def __post_init__(self) -> None:
         """Prepends `self.txgroup` with 'Other Expenses:'."""
         object.__setattr__(self, "txgroup", f"Other Expenses:{self.txgroup}")
-        super().__post_init__()
+        super(OtherExpenseSortRule, self).__post_init__()
 
 
 @mypyc_attr(native_class=False)
@@ -327,7 +330,7 @@ class IgnoreSortRule(_SortRule):
     def __post_init__(self) -> None:
        """Prepends `self.txgroup` with 'Ignore:'."""
        object.__setattr__(self, "txgroup", f"Ignore:{self.txgroup}")
-        super().__post_init__()
+        super(IgnoreSortRule, self).__post_init__()
 
 
 TRule = TypeVar(
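The rule.py changes above follow one pattern: every zero-argument super() call becomes the explicit two-argument super(Class, self) form, and inbound/outbound matching now routes through TreasuryWallet.check_membership(address, block). A minimal, self-contained sketch of the explicit-super pattern in a frozen dataclass (illustrative names only, not the package's own classes) shows that both spellings reach the same base-class __post_init__; the explicit form simply names the class instead of relying on the implicit __class__ cell:

from dataclasses import dataclass


@dataclass(frozen=True)
class BaseRule:
    txgroup: str

    def __post_init__(self) -> None:
        # stand-in for the real registration work done in _SortRule.__post_init__
        print(f"registered rule for {self.txgroup}")


@dataclass(frozen=True)
class RevenueRule(BaseRule):
    def __post_init__(self) -> None:
        # frozen dataclass: mutate via object.__setattr__, then defer to the base class
        object.__setattr__(self, "txgroup", f"Revenue:{self.txgroup}")
        super(RevenueRule, self).__post_init__()  # explicit two-argument super, as in the diff


RevenueRule("Fees")  # prints: registered rule for Revenue:Fees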
dao_treasury/sorting/rules/__init__.cp312-win32.pyd
Binary file

dao_treasury/sorting/rules/__init__.py
ADDED
@@ -0,0 +1 @@
+from dao_treasury.sorting.rules.ignore import *

dao_treasury/sorting/rules/ignore/__init__.cp312-win32.pyd
Binary file

dao_treasury/sorting/rules/ignore/__init__.py
ADDED
@@ -0,0 +1 @@
+from dao_treasury.sorting.rules.ignore.llamapay import *

dao_treasury/sorting/rules/ignore/llamapay.cp312-win32.pyd
Binary file

dao_treasury/sorting/rules/ignore/llamapay.py
ADDED
@@ -0,0 +1,20 @@
+from dao_treasury import TreasuryTx
+from dao_treasury.sorting.factory import ignore
+from dao_treasury.streams import llamapay
+
+
+@ignore("LlamaPay")
+def is_llamapay_stream_replenishment(tx: TreasuryTx) -> bool:
+    if tx.to_address.address in llamapay.factories:  # type: ignore [operator]
+        # We amortize these streams daily in the `llamapay` module, you'll sort each stream appropriately.
+        return True
+
+    # NOTE: not sure if we want this yet
+    # Puling unused funds back from vesting escrow / llamapay
+    # elif tx.from_address == "Contract: LlamaPay" and "StreamCancelled" in tx.events:
+    #    if tx.amount > 0:
+    #        tx.amount *= -1
+    #    if tx.value_usd > 0:
+    #        tx.value_usd *= -1
+    #    return True
+    return False

dao_treasury/streams/__init__.cp312-win32.pyd
Binary file

dao_treasury/streams/__init__.py
ADDED
File without changes

dao_treasury/streams/llamapay.cp312-win32.pyd
Binary file

dao_treasury/streams/llamapay.py
ADDED
@@ -0,0 +1,388 @@
+import asyncio
+import datetime as dt
+import decimal
+from logging import getLogger
+from typing import (
+    Awaitable,
+    Callable,
+    Dict,
+    Final,
+    Iterator,
+    List,
+    Optional,
+    Set,
+    final,
+)
+
+import dank_mids
+import pony.orm
+from a_sync import AsyncThreadPoolExecutor, igather
+from brownie.network.event import _EventItem
+from eth_typing import BlockNumber, ChecksumAddress, HexAddress, HexStr
+from tqdm.asyncio import tqdm_asyncio
+
+import y
+from y.time import NoBlockFound, UnixTimestamp
+from y.utils.events import decode_logs, get_logs_asap
+
+from dao_treasury import constants
+from dao_treasury.db import (
+    Stream,
+    StreamedFunds,
+    Address,
+    Token,
+    must_sort_outbound_txgroup_dbid,
+)
+from dao_treasury._wallet import TreasuryWallet
+
+
+logger: Final = getLogger(__name__)
+
+_UTC: Final = dt.timezone.utc
+
+_ONE_DAY: Final = 60 * 60 * 24
+
+_STREAMS_THREAD: Final = AsyncThreadPoolExecutor(1)
+
+create_task: Final = asyncio.create_task
+sleep: Final = asyncio.sleep
+
+datetime: Final = dt.datetime
+timedelta: Final = dt.timedelta
+fromtimestamp: Final = datetime.fromtimestamp
+now: Final = datetime.now
+
+Decimal: Final = decimal.Decimal
+
+ObjectNotFound: Final = pony.orm.ObjectNotFound
+commit: Final = pony.orm.commit
+db_session: Final = pony.orm.db_session
+
+Contract: Final = y.Contract
+Network: Final = y.Network
+get_block_at_timestamp: Final = y.get_block_at_timestamp
+get_price: Final = y.get_price
+
+
+networks: Final = [Network.Mainnet]
+
+factories: List[HexAddress] = []
+
+if dai_stream_factory := {
+    Network.Mainnet: "0x60c7B0c5B3a4Dc8C690b074727a17fF7aA287Ff2",
+}.get(constants.CHAINID):
+    factories.append(dai_stream_factory)
+
+if yfi_stream_factory := {
+    Network.Mainnet: "0xf3764eC89B1ad20A31ed633b1466363FAc1741c4",
+}.get(constants.CHAINID):
+    factories.append(yfi_stream_factory)
+
+
+def _generate_dates(
+    start: dt.datetime, end: dt.datetime, stop_at_today: bool = True
+) -> Iterator[dt.datetime]:
+    current = start
+    while current < end:
+        yield current
+        current += timedelta(days=1)
+        if stop_at_today and current.date() > now(_UTC).date():
+            break
+
+
+_StreamToStart = Callable[[HexStr, Optional[BlockNumber]], Awaitable[int]]
+
+_streamToStart_cache: Final[Dict[HexStr, _StreamToStart]] = {}
+
+
+def _get_streamToStart(stream_id: HexStr) -> _StreamToStart:
+    if streamToStart := _streamToStart_cache.get(stream_id):
+        return streamToStart
+    with db_session:
+        contract: y.Contract = Stream[stream_id].contract.contract  # type: ignore [misc]
+        streamToStart = contract.streamToStart.coroutine
+        _streamToStart_cache[stream_id] = streamToStart
+        return streamToStart
+
+
+async def _get_start_timestamp(
+    stream_id: HexStr, block: Optional[BlockNumber] = None
+) -> int:
+    streamToStart = _streamToStart_cache.get(stream_id)
+    if streamToStart is None:
+        streamToStart = await _STREAMS_THREAD.run(_get_streamToStart, stream_id)
+    # try:
+    return int(await streamToStart(f"0x{stream_id}", block_identifier=block))  # type: ignore [call-arg]
+    # except Exception:
+    #     return 0
+
+
+def _pause_stream(stream_id: HexStr) -> None:
+    with db_session:
+        Stream[stream_id].pause()  # type: ignore [misc]
+
+
+def _stop_stream(stream_id: str, block: BlockNumber) -> None:
+    with db_session:
+        Stream[stream_id].stop_stream(block)  # type: ignore [misc]
+
+
+_block_timestamps: Final[Dict[BlockNumber, UnixTimestamp]] = {}
+
+
+async def _get_block_timestamp(block: BlockNumber) -> UnixTimestamp:
+    if timestamp := _block_timestamps.get(block):
+        return timestamp
+    timestamp = await dank_mids.eth.get_block_timestamp(block)
+    _block_timestamps[block] = timestamp
+    return timestamp
+
+
+"""
+class _StreamProcessor(ABC):
+    @abstractmethod
+    async def _load_streams(self) -> None:
+        ...
+"""
+
+
+@final
+class LlamaPayProcessor:
+    """
+    Generalized async processor for DAO stream contracts.
+    Args are passed in at construction time.
+    Supports time-bounded admin periods for filtering.
+    """
+
+    handled_events: Final = (
+        "StreamCreated",
+        "StreamCreatedWithReason",
+        "StreamModified",
+        "StreamPaused",
+        "StreamCancelled",
+    )
+    skipped_events: Final = (
+        "PayerDeposit",
+        "PayerWithdraw",
+        "Withdraw",
+    )
+
+    def __init__(self) -> None:
+        self.stream_contracts: Final = {Contract(addr) for addr in factories}
+
+    async def _get_streams(self) -> None:
+        await igather(
+            self._load_contract_events(stream_contract)
+            for stream_contract in self.stream_contracts
+        )
+
+    async def _load_contract_events(self, stream_contract: y.Contract) -> None:
+        events = decode_logs(
+            await get_logs_asap(stream_contract.address, None, sync=False)
+        )
+        keys: Set[str] = set(events.keys())
+        for k in keys:
+            if k not in self.handled_events and k not in self.skipped_events:
+                raise NotImplementedError(f"Need to handle event: {k}")
+
+        if "StreamCreated" in keys:
+            for event in events["StreamCreated"]:
+                from_address, *_ = event.values()
+                from_address = Address.get_or_insert(from_address).address
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(self._get_stream, event)
+
+        if "StreamCreatedWithReason" in keys:
+            for event in events["StreamCreatedWithReason"]:
+                from_address, *_ = event.values()
+                from_address = Address.get_or_insert(from_address).address
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(self._get_stream, event)
+
+        if "StreamModified" in keys:
+            for event in events["StreamModified"]:
+                from_address, _, _, old_stream_id, *_ = event.values()
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(
+                    _stop_stream, old_stream_id.hex(), event.block_number
+                )
+                await _STREAMS_THREAD.run(self._get_stream, event)
+
+        if "StreamPaused" in keys:
+            for event in events["StreamPaused"]:
+                from_address, *_, stream_id = event.values()
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(_pause_stream, stream_id.hex())
+
+        if "StreamCancelled" in keys:
+            for event in events["StreamCancelled"]:
+                from_address, *_, stream_id = event.values()
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(
+                    _stop_stream, stream_id.hex(), event.block_number
+                )
+
+    def _get_stream(self, log: _EventItem) -> Stream:
+        with db_session:
+            if log.name == "StreamCreated":
+                from_address, to_address, amount_per_second, stream_id = log.values()
+                reason = None
+            elif log.name == "StreamCreatedWithReason":
+                from_address, to_address, amount_per_second, stream_id, reason = (
+                    log.values()
+                )
+            elif log.name == "StreamModified":
+                (
+                    from_address,
+                    _,
+                    _,
+                    old_stream_id,
+                    to_address,
+                    amount_per_second,
+                    stream_id,
+                ) = log.values()
+                reason = Stream[old_stream_id.hex()].reason  # type: ignore [misc]
+            else:
+                raise NotImplementedError("This is not an appropriate event log.")
+
+            stream_id_hex = stream_id.hex()
+            try:
+                return Stream[stream_id_hex]  # type: ignore [misc]
+            except ObjectNotFound:
+                entity = Stream(
+                    stream_id=stream_id_hex,
+                    contract=Address.get_dbid(log.address),
+                    start_block=log.block_number,
+                    token=Token.get_dbid(Contract(log.address).token()),
+                    from_address=Address.get_dbid(from_address),
+                    to_address=Address.get_dbid(to_address),
+                    amount_per_second=amount_per_second,
+                    txgroup=must_sort_outbound_txgroup_dbid,
+                )
+                if reason is not None:
+                    entity.reason = reason
+                commit()
+                return entity
+
+    def streams_for_recipient(
+        self, recipient: ChecksumAddress, at_block: Optional[BlockNumber] = None
+    ) -> List[Stream]:
+        with db_session:
+            streams = Stream.select(lambda s: s.to_address.address == recipient)
+            if at_block is None:
+                return list(streams)
+            return [
+                s for s in streams if (s.end_block is None or at_block <= s.end_block)
+            ]
+
+    def streams_for_token(
+        self, token: ChecksumAddress, include_inactive: bool = False
+    ) -> List[Stream]:
+        with db_session:
+            streams = Stream.select(lambda s: s.token.address.address == token)
+            return (
+                list(streams)
+                if include_inactive
+                else [s for s in streams if s.is_alive]
+            )
+
+    async def process_streams(self, run_forever: bool = False) -> None:
+        logger.info("Processing stream events and streamed funds...")
+        # Always sync events before processing
+        await self._get_streams()
+        with db_session:
+            streams = [s.stream_id for s in Stream.select()]
+        await tqdm_asyncio.gather(
+            *(
+                self.process_stream(stream_id, run_forever=run_forever)
+                for stream_id in streams
+            ),
+            desc="LlamaPay Streams",
+        )
+
+    async def process_stream(
+        self, stream_id: HexStr, run_forever: bool = False
+    ) -> None:
+        start, end = await _STREAMS_THREAD.run(Stream._get_start_and_end, stream_id)
+        for date_obj in _generate_dates(start, end, stop_at_today=not run_forever):
+            if await self.process_stream_for_date(stream_id, date_obj) is None:
+                return
+
+    async def process_stream_for_date(
+        self, stream_id: HexStr, date_obj: dt.datetime
+    ) -> Optional[StreamedFunds]:
+        entity = await _STREAMS_THREAD.run(
+            StreamedFunds.get_entity, stream_id, date_obj
+        )
+        if entity:
+            return entity
+
+        stream_token, start_date = await _STREAMS_THREAD.run(
+            Stream._get_token_and_start_date, stream_id
+        )
+        check_at = date_obj + timedelta(days=1) - timedelta(seconds=1)
+        if check_at > now(tz=_UTC):
+            await sleep((check_at - now(tz=_UTC)).total_seconds())
+
+        while True:
+            try:
+                block = await get_block_at_timestamp(check_at, sync=False)
+            except NoBlockFound:
+                sleep_time = (check_at - now(tz=_UTC)).total_seconds()
+                logger.debug(
+                    "no block found for %s, sleeping %ss", check_at, sleep_time
+                )
+                await sleep(sleep_time)
+            else:
+                break
+
+        price_fut = create_task(get_price(stream_token, block, sync=False))
+        start_timestamp = await _get_start_timestamp(stream_id, block)
+        if start_timestamp == 0:
+            if await _STREAMS_THREAD.run(Stream.check_closed, stream_id):
+                price_fut.cancel()
+                return None
+
+            while start_timestamp == 0:
+                block -= 1
+                start_timestamp = await _get_start_timestamp(stream_id, block)
+
+            block_datetime = fromtimestamp(await _get_block_timestamp(block), tz=_UTC)
+            assert block_datetime.date() == date_obj.date()
+            seconds_active = (check_at - block_datetime).seconds
+            is_last_day = True
+        else:
+            seconds_active = int(check_at.timestamp()) - start_timestamp
+            is_last_day = False
+
+        seconds_active_today = min(seconds_active, _ONE_DAY)
+        if seconds_active_today < _ONE_DAY and not is_last_day:
+            if date_obj.date() != start_date:
+                seconds_active_today = _ONE_DAY
+
+        with db_session:
+            price = Decimal(await price_fut)
+            entity = await _STREAMS_THREAD.run(
+                StreamedFunds.create_entity,
+                stream_id,
+                date_obj,
+                price,
+                seconds_active_today,
+                is_last_day,
+            )
+            return entity
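For orientation, here is a minimal usage sketch of the new module. It assumes an initialized dao_treasury database and a connected RPC environment (brownie/dank_mids), which the module itself does not set up; nothing below is taken from the package's documentation:

import asyncio

from dao_treasury.streams.llamapay import LlamaPayProcessor


async def main() -> None:
    processor = LlamaPayProcessor()  # wraps the factory contracts registered above
    # one pass: sync stream events, then record streamed funds day by day up to today
    await processor.process_streams(run_forever=False)


asyncio.run(main())

With run_forever=True (how Treasury._process_streams calls it below), date generation does not stop at today, so daily accruals keep being recorded as new days arrive.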
dao_treasury/treasury.py
CHANGED
@@ -1,4 +1,21 @@
-
+"""Treasury orchestration and analytics interface.
+
+This module defines the Treasury class, which aggregates DAO wallets, sets up
+sorting rules, and manages transaction ingestion and streaming analytics.
+It coordinates the end-to-end flow from wallet configuration to database
+population and dashboard analytics.
+
+Key Responsibilities:
+- Aggregate and manage DAO-controlled wallets.
+- Ingest and process on-chain transactions.
+- Apply sorting/categorization rules.
+- Integrate with streaming protocols (e.g., LlamaPay).
+- Populate the database for analytics and dashboards.
+
+This is the main entry point for orchestrating DAO treasury analytics.
+"""
+
+from asyncio import create_task, gather
 from logging import getLogger
 from pathlib import Path
 from typing import Final, Iterable, List, Optional, Union
@@ -9,11 +26,14 @@ from eth_typing import BlockNumber
 from eth_portfolio.structs import LedgerEntry
 from eth_portfolio.typing import PortfolioBalances
 from eth_portfolio_scripts._portfolio import ExportablePortfolio
+from pony.orm import db_session
 from tqdm.asyncio import tqdm_asyncio
 
 from dao_treasury._wallet import TreasuryWallet
+from dao_treasury.constants import CHAINID
 from dao_treasury.db import TreasuryTx
 from dao_treasury.sorting._rules import Rules
+from dao_treasury.streams import llamapay
 
 
 Wallet = Union[TreasuryWallet, str]
@@ -56,20 +76,22 @@ class Treasury(a_sync.ASyncGenericBase):  # type: ignore [misc]
             TypeError: If any item in `wallets` is not a str or TreasuryWallet.
 
         Examples:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            .. code-block:: python
+
+                # Create a synchronous Treasury
+                treasury = Treasury(
+                    wallets=["0xAbc123...", TreasuryWallet("0xDef456...", start_block=1000)],
+                    sort_rules=Path("/path/to/rules"),
+                    start_block=500,
+                    label="DAO Treasury",
+                    asynchronous=False
+                )
+
+                # Create an asynchronous Treasury
+                treasury_async = Treasury(
+                    wallets=["0xAbc123..."],
+                    asynchronous=True
+                )
         """
         global TREASURY
         if TREASURY is not None:
@@ -94,7 +116,11 @@ class Treasury(a_sync.ASyncGenericBase):  # type: ignore [misc]
         self.sort_rules: Final = Rules(sort_rules) if sort_rules else None
 
         self.portfolio: Final = ExportablePortfolio(
-            addresses=(
+            addresses=(
+                wallet.address
+                for wallet in self.wallets
+                if wallet.networks is None or CHAINID in wallet.networks
+            ),
             start_block=start_block,
             label=label,
             load_prices=True,
@@ -102,6 +128,10 @@ class Treasury(a_sync.ASyncGenericBase):  # type: ignore [misc]
         )
         """An eth_portfolio.Portfolio object used for exporting tx and balance history"""
 
+        self._llamapay: Final = (
+            llamapay.LlamaPayProcessor() if CHAINID in llamapay.networks else None
+        )
+
         self.asynchronous: Final = asynchronous
         """A boolean flag indicating whether the API for this `Treasury` object is sync or async by default"""
 
@@ -114,7 +144,7 @@ class Treasury(a_sync.ASyncGenericBase):  # type: ignore [misc]
     def txs(self) -> a_sync.ASyncIterator[LedgerEntry]:
         return self.portfolio.ledger.all_entries
 
-    async def
+    async def _insert_txs(
         self, start_block: BlockNumber, end_block: BlockNumber
     ) -> None:
         """Populate the database with treasury transactions in a block range.
@@ -130,15 +160,32 @@
 
         Examples:
             >>> # Insert transactions from block 0 to 10000
-            >>> await treasury.
+            >>> await treasury._insert_txs(0, 10000)
+        """
+        with db_session:
+            futs = []
+            async for entry in self.portfolio.ledger[start_block:end_block]:
+                if not entry.value:
+                    # TODO: add an arg in eth-port to skip 0 value
+                    logger.debug("zero value transfer, skipping %s", entry)
+                    continue
+                futs.append(create_task(TreasuryTx.insert(entry)))
+            if futs:
+                await tqdm_asyncio.gather(*futs, desc="Insert Txs to Postgres")
+        logger.info(f"{len(futs)} transfers exported")
+
+    async def _process_streams(self) -> None:
+        if self._llamapay is not None:
+            await self._llamapay.process_streams(run_forever=True)
+
+    async def populate_db(
+        self, start_block: BlockNumber, end_block: BlockNumber
+    ) -> None:
+        """
+        Populate the database with treasury transactions and streams in parallel.
         """
-
-
-
-
-
-            continue
-        futs.append(create_task(TreasuryTx.insert(entry)))
-
-        if futs:
-            await tqdm_asyncio.gather(*futs, desc="Insert Txs to Postgres")
+        tasks = [self._insert_txs(start_block, end_block)]
+        if self._llamapay:
+            tasks.append(self._process_streams())
+        await gather(*tasks)
+        logger.info("db connection closed")
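Putting the pieces together, a hedged end-to-end sketch (the import path, address, and block numbers are assumptions for illustration, not taken from the package's documentation):

import asyncio

from dao_treasury.treasury import Treasury  # import path assumed from the module shown above


async def main() -> None:
    treasury = Treasury(
        wallets=["0xAbc123..."],  # placeholder address, as in the docstring example
        asynchronous=True,
    )
    # ledger ingestion and LlamaPay stream processing run concurrently
    await treasury.populate_db(start_block=0, end_block=20_000_000)


asyncio.run(main())

Note that _process_streams passes run_forever=True, so on chains where LlamaPay is configured populate_db behaves like a long-running service call rather than a one-shot export.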
dao_treasury/types.cp312-win32.pyd
Binary file