dao-treasury: 0.0.22-cp310-cp310-macosx_11_0_arm64.whl → 0.0.69-cp310-cp310-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dao_treasury/.grafana/provisioning/dashboards/breakdowns/Expenses.json +551 -0
- dao_treasury/.grafana/provisioning/dashboards/breakdowns/Revenue.json +551 -0
- dao_treasury/.grafana/provisioning/dashboards/dashboards.yaml +7 -7
- dao_treasury/.grafana/provisioning/dashboards/streams/LlamaPay.json +220 -0
- dao_treasury/.grafana/provisioning/dashboards/summary/Monthly.json +18 -23
- dao_treasury/.grafana/provisioning/dashboards/transactions/Treasury Transactions.json +181 -29
- dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow (Including Unsorted).json +808 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow.json +602 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Current Treasury Assets.json +1009 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Historical Treasury Balances.json +2989 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Operating Cashflow.json +478 -0
- dao_treasury/.grafana/provisioning/datasources/datasources.yaml +17 -0
- dao_treasury/ENVIRONMENT_VARIABLES.py +20 -0
- dao_treasury/__init__.py +20 -0
- dao_treasury/_docker.cpython-310-darwin.so +0 -0
- dao_treasury/_docker.py +67 -38
- dao_treasury/_nicknames.cpython-310-darwin.so +0 -0
- dao_treasury/_nicknames.py +24 -2
- dao_treasury/_wallet.cpython-310-darwin.so +0 -0
- dao_treasury/_wallet.py +157 -16
- dao_treasury/constants.cpython-310-darwin.so +0 -0
- dao_treasury/constants.py +39 -0
- dao_treasury/db.py +384 -45
- dao_treasury/docker-compose.yaml +6 -5
- dao_treasury/main.py +86 -17
- dao_treasury/sorting/__init__.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/__init__.py +171 -42
- dao_treasury/sorting/_matchers.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/_rules.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/_rules.py +1 -3
- dao_treasury/sorting/factory.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/factory.py +2 -6
- dao_treasury/sorting/rule.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/rule.py +13 -10
- dao_treasury/sorting/rules/__init__.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/rules/__init__.py +1 -0
- dao_treasury/sorting/rules/ignore/__init__.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/rules/ignore/__init__.py +1 -0
- dao_treasury/sorting/rules/ignore/llamapay.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/rules/ignore/llamapay.py +20 -0
- dao_treasury/streams/__init__.cpython-310-darwin.so +0 -0
- dao_treasury/streams/__init__.py +0 -0
- dao_treasury/streams/llamapay.cpython-310-darwin.so +0 -0
- dao_treasury/streams/llamapay.py +388 -0
- dao_treasury/treasury.py +75 -28
- dao_treasury/types.cpython-310-darwin.so +0 -0
- dao_treasury-0.0.69.dist-info/METADATA +120 -0
- dao_treasury-0.0.69.dist-info/RECORD +54 -0
- dao_treasury-0.0.69.dist-info/top_level.txt +2 -0
- dao_treasury__mypyc.cpython-310-darwin.so +0 -0
- 52b51d40e96d4333695d__mypyc.cpython-310-darwin.so +0 -0
- dao_treasury/.grafana/provisioning/datasources/sqlite.yaml +0 -10
- dao_treasury-0.0.22.dist-info/METADATA +0 -63
- dao_treasury-0.0.22.dist-info/RECORD +0 -31
- dao_treasury-0.0.22.dist-info/top_level.txt +0 -2
- {dao_treasury-0.0.22.dist-info → dao_treasury-0.0.69.dist-info}/WHEEL +0 -0
dao_treasury/sorting/rules/ignore/llamapay.py
ADDED
@@ -0,0 +1,20 @@
+from dao_treasury import TreasuryTx
+from dao_treasury.sorting.factory import ignore
+from dao_treasury.streams import llamapay
+
+
+@ignore("LlamaPay")
+def is_llamapay_stream_replenishment(tx: TreasuryTx) -> bool:
+    if tx.to_address.address in llamapay.factories:  # type: ignore [operator]
+        # We amortize these streams daily in the `llamapay` module, where you'll sort each stream appropriately.
+        return True
+
+    # NOTE: not sure if we want this yet
+    # Pulling unused funds back from vesting escrow / llamapay
+    # elif tx.from_address == "Contract: LlamaPay" and "StreamCancelled" in tx.events:
+    #     if tx.amount > 0:
+    #         tx.amount *= -1
+    #     if tx.value_usd > 0:
+    #         tx.value_usd *= -1
+    #     return True
+    return False
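The new rule also illustrates the general pattern for registering sort rules: decorate a predicate that takes a `TreasuryTx` and returns a bool. Below is a minimal hypothetical sketch of another ignore rule; the label, function name, and address are invented for illustration, while `ignore`, `TreasuryTx`, and the `tx.to_address.address` field come from the code above.

```python
from dao_treasury import TreasuryTx
from dao_treasury.sorting.factory import ignore

# Placeholder address purely for illustration; not a real contract.
EXAMPLE_ROUTER = "0x0000000000000000000000000000000000000001"


@ignore("Example: Internal Transfers")
def is_internal_transfer(tx: TreasuryTx) -> bool:
    # Transfers into the example router are treated as internal moves,
    # so they are excluded from revenue/expense reporting.
    return tx.to_address.address == EXAMPLE_ROUTER
```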
dao_treasury/streams/__init__.cpython-310-darwin.so
Binary file

dao_treasury/streams/__init__.py
File without changes

dao_treasury/streams/llamapay.cpython-310-darwin.so
Binary file
dao_treasury/streams/llamapay.py
ADDED
@@ -0,0 +1,388 @@
+import asyncio
+import datetime as dt
+import decimal
+from logging import getLogger
+from typing import (
+    Awaitable,
+    Callable,
+    Dict,
+    Final,
+    Iterator,
+    List,
+    Optional,
+    Set,
+    final,
+)
+
+import dank_mids
+import pony.orm
+from a_sync import AsyncThreadPoolExecutor, igather
+from brownie.network.event import _EventItem
+from eth_typing import BlockNumber, ChecksumAddress, HexAddress, HexStr
+from tqdm.asyncio import tqdm_asyncio
+
+import y
+from y.time import NoBlockFound, UnixTimestamp
+from y.utils.events import decode_logs, get_logs_asap
+
+from dao_treasury import constants
+from dao_treasury.db import (
+    Stream,
+    StreamedFunds,
+    Address,
+    Token,
+    must_sort_outbound_txgroup_dbid,
+)
+from dao_treasury._wallet import TreasuryWallet
+
+
+logger: Final = getLogger(__name__)
+
+_UTC: Final = dt.timezone.utc
+
+_ONE_DAY: Final = 60 * 60 * 24
+
+_STREAMS_THREAD: Final = AsyncThreadPoolExecutor(1)
+
+create_task: Final = asyncio.create_task
+sleep: Final = asyncio.sleep
+
+datetime: Final = dt.datetime
+timedelta: Final = dt.timedelta
+fromtimestamp: Final = datetime.fromtimestamp
+now: Final = datetime.now
+
+Decimal: Final = decimal.Decimal
+
+ObjectNotFound: Final = pony.orm.ObjectNotFound
+commit: Final = pony.orm.commit
+db_session: Final = pony.orm.db_session
+
+Contract: Final = y.Contract
+Network: Final = y.Network
+get_block_at_timestamp: Final = y.get_block_at_timestamp
+get_price: Final = y.get_price
+
+
+networks: Final = [Network.Mainnet]
+
+factories: List[HexAddress] = []
+
+if dai_stream_factory := {
+    Network.Mainnet: "0x60c7B0c5B3a4Dc8C690b074727a17fF7aA287Ff2",
+}.get(constants.CHAINID):
+    factories.append(dai_stream_factory)
+
+if yfi_stream_factory := {
+    Network.Mainnet: "0xf3764eC89B1ad20A31ed633b1466363FAc1741c4",
+}.get(constants.CHAINID):
+    factories.append(yfi_stream_factory)
+
+
+def _generate_dates(
+    start: dt.datetime, end: dt.datetime, stop_at_today: bool = True
+) -> Iterator[dt.datetime]:
+    current = start
+    while current < end:
+        yield current
+        current += timedelta(days=1)
+        if stop_at_today and current.date() > now(_UTC).date():
+            break
+
+
+_StreamToStart = Callable[[HexStr, Optional[BlockNumber]], Awaitable[int]]
+
+_streamToStart_cache: Final[Dict[HexStr, _StreamToStart]] = {}
+
+
+def _get_streamToStart(stream_id: HexStr) -> _StreamToStart:
+    if streamToStart := _streamToStart_cache.get(stream_id):
+        return streamToStart
+    with db_session:
+        contract: y.Contract = Stream[stream_id].contract.contract  # type: ignore [misc]
+    streamToStart = contract.streamToStart.coroutine
+    _streamToStart_cache[stream_id] = streamToStart
+    return streamToStart
+
+
+async def _get_start_timestamp(
+    stream_id: HexStr, block: Optional[BlockNumber] = None
+) -> int:
+    streamToStart = _streamToStart_cache.get(stream_id)
+    if streamToStart is None:
+        streamToStart = await _STREAMS_THREAD.run(_get_streamToStart, stream_id)
+    # try:
+    return int(await streamToStart(f"0x{stream_id}", block_identifier=block))  # type: ignore [call-arg]
+    # except Exception:
+    #     return 0
+
+
+def _pause_stream(stream_id: HexStr) -> None:
+    with db_session:
+        Stream[stream_id].pause()  # type: ignore [misc]
+
+
+def _stop_stream(stream_id: str, block: BlockNumber) -> None:
+    with db_session:
+        Stream[stream_id].stop_stream(block)  # type: ignore [misc]
+
+
+_block_timestamps: Final[Dict[BlockNumber, UnixTimestamp]] = {}
+
+
+async def _get_block_timestamp(block: BlockNumber) -> UnixTimestamp:
+    if timestamp := _block_timestamps.get(block):
+        return timestamp
+    timestamp = await dank_mids.eth.get_block_timestamp(block)
+    _block_timestamps[block] = timestamp
+    return timestamp
+
+
+"""
+class _StreamProcessor(ABC):
+    @abstractmethod
+    async def _load_streams(self) -> None:
+        ...
+"""
+
+
+@final
+class LlamaPayProcessor:
+    """
+    Generalized async processor for DAO stream contracts.
+    Args are passed in at construction time.
+    Supports time-bounded admin periods for filtering.
+    """
+
+    handled_events: Final = (
+        "StreamCreated",
+        "StreamCreatedWithReason",
+        "StreamModified",
+        "StreamPaused",
+        "StreamCancelled",
+    )
+    skipped_events: Final = (
+        "PayerDeposit",
+        "PayerWithdraw",
+        "Withdraw",
+    )
+
+    def __init__(self) -> None:
+        self.stream_contracts: Final = {Contract(addr) for addr in factories}
+
+    async def _get_streams(self) -> None:
+        await igather(
+            self._load_contract_events(stream_contract)
+            for stream_contract in self.stream_contracts
+        )
+
+    async def _load_contract_events(self, stream_contract: y.Contract) -> None:
+        events = decode_logs(
+            await get_logs_asap(stream_contract.address, None, sync=False)
+        )
+        keys: Set[str] = set(events.keys())
+        for k in keys:
+            if k not in self.handled_events and k not in self.skipped_events:
+                raise NotImplementedError(f"Need to handle event: {k}")
+
+        if "StreamCreated" in keys:
+            for event in events["StreamCreated"]:
+                from_address, *_ = event.values()
+                from_address = Address.get_or_insert(from_address).address
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(self._get_stream, event)
+
+        if "StreamCreatedWithReason" in keys:
+            for event in events["StreamCreatedWithReason"]:
+                from_address, *_ = event.values()
+                from_address = Address.get_or_insert(from_address).address
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(self._get_stream, event)
+
+        if "StreamModified" in keys:
+            for event in events["StreamModified"]:
+                from_address, _, _, old_stream_id, *_ = event.values()
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(
+                    _stop_stream, old_stream_id.hex(), event.block_number
+                )
+                await _STREAMS_THREAD.run(self._get_stream, event)
+
+        if "StreamPaused" in keys:
+            for event in events["StreamPaused"]:
+                from_address, *_, stream_id = event.values()
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(_pause_stream, stream_id.hex())
+
+        if "StreamCancelled" in keys:
+            for event in events["StreamCancelled"]:
+                from_address, *_, stream_id = event.values()
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(
+                    _stop_stream, stream_id.hex(), event.block_number
+                )
+
+    def _get_stream(self, log: _EventItem) -> Stream:
+        with db_session:
+            if log.name == "StreamCreated":
+                from_address, to_address, amount_per_second, stream_id = log.values()
+                reason = None
+            elif log.name == "StreamCreatedWithReason":
+                from_address, to_address, amount_per_second, stream_id, reason = (
+                    log.values()
+                )
+            elif log.name == "StreamModified":
+                (
+                    from_address,
+                    _,
+                    _,
+                    old_stream_id,
+                    to_address,
+                    amount_per_second,
+                    stream_id,
+                ) = log.values()
+                reason = Stream[old_stream_id.hex()].reason  # type: ignore [misc]
+            else:
+                raise NotImplementedError("This is not an appropriate event log.")
+
+            stream_id_hex = stream_id.hex()
+            try:
+                return Stream[stream_id_hex]  # type: ignore [misc]
+            except ObjectNotFound:
+                entity = Stream(
+                    stream_id=stream_id_hex,
+                    contract=Address.get_dbid(log.address),
+                    start_block=log.block_number,
+                    token=Token.get_dbid(Contract(log.address).token()),
+                    from_address=Address.get_dbid(from_address),
+                    to_address=Address.get_dbid(to_address),
+                    amount_per_second=amount_per_second,
+                    txgroup=must_sort_outbound_txgroup_dbid,
+                )
+                if reason is not None:
+                    entity.reason = reason
+                commit()
+                return entity
+
+    def streams_for_recipient(
+        self, recipient: ChecksumAddress, at_block: Optional[BlockNumber] = None
+    ) -> List[Stream]:
+        with db_session:
+            streams = Stream.select(lambda s: s.to_address.address == recipient)
+            if at_block is None:
+                return list(streams)
+            return [
+                s for s in streams if (s.end_block is None or at_block <= s.end_block)
+            ]
+
+    def streams_for_token(
+        self, token: ChecksumAddress, include_inactive: bool = False
+    ) -> List[Stream]:
+        with db_session:
+            streams = Stream.select(lambda s: s.token.address.address == token)
+            return (
+                list(streams)
+                if include_inactive
+                else [s for s in streams if s.is_alive]
+            )
+
+    async def process_streams(self, run_forever: bool = False) -> None:
+        logger.info("Processing stream events and streamed funds...")
+        # Always sync events before processing
+        await self._get_streams()
+        with db_session:
+            streams = [s.stream_id for s in Stream.select()]
+        await tqdm_asyncio.gather(
+            *(
+                self.process_stream(stream_id, run_forever=run_forever)
+                for stream_id in streams
+            ),
+            desc="LlamaPay Streams",
+        )
+
+    async def process_stream(
+        self, stream_id: HexStr, run_forever: bool = False
+    ) -> None:
+        start, end = await _STREAMS_THREAD.run(Stream._get_start_and_end, stream_id)
+        for date_obj in _generate_dates(start, end, stop_at_today=not run_forever):
+            if await self.process_stream_for_date(stream_id, date_obj) is None:
+                return
+
+    async def process_stream_for_date(
+        self, stream_id: HexStr, date_obj: dt.datetime
+    ) -> Optional[StreamedFunds]:
+        entity = await _STREAMS_THREAD.run(
+            StreamedFunds.get_entity, stream_id, date_obj
+        )
+        if entity:
+            return entity
+
+        stream_token, start_date = await _STREAMS_THREAD.run(
+            Stream._get_token_and_start_date, stream_id
+        )
+        check_at = date_obj + timedelta(days=1) - timedelta(seconds=1)
+        if check_at > now(tz=_UTC):
+            await sleep((check_at - now(tz=_UTC)).total_seconds())
+
+        while True:
+            try:
+                block = await get_block_at_timestamp(check_at, sync=False)
+            except NoBlockFound:
+                sleep_time = (check_at - now(tz=_UTC)).total_seconds()
+                logger.debug(
+                    "no block found for %s, sleeping %ss", check_at, sleep_time
+                )
+                await sleep(sleep_time)
+            else:
+                break
+
+        price_fut = create_task(get_price(stream_token, block, sync=False))
+        start_timestamp = await _get_start_timestamp(stream_id, block)
+        if start_timestamp == 0:
+            if await _STREAMS_THREAD.run(Stream.check_closed, stream_id):
+                price_fut.cancel()
+                return None
+
+            while start_timestamp == 0:
+                block -= 1
+                start_timestamp = await _get_start_timestamp(stream_id, block)
+
+            block_datetime = fromtimestamp(await _get_block_timestamp(block), tz=_UTC)
+            assert block_datetime.date() == date_obj.date()
+            seconds_active = (check_at - block_datetime).seconds
+            is_last_day = True
+        else:
+            seconds_active = int(check_at.timestamp()) - start_timestamp
+            is_last_day = False
+
+        seconds_active_today = min(seconds_active, _ONE_DAY)
+        if seconds_active_today < _ONE_DAY and not is_last_day:
+            if date_obj.date() != start_date:
+                seconds_active_today = _ONE_DAY
+
+        with db_session:
+            price = Decimal(await price_fut)
+            entity = await _STREAMS_THREAD.run(
+                StreamedFunds.create_entity,
+                stream_id,
+                date_obj,
+                price,
+                seconds_active_today,
+                is_last_day,
+            )
+            return entity
dao_treasury/treasury.py
CHANGED
@@ -1,4 +1,21 @@
-
+"""Treasury orchestration and analytics interface.
+
+This module defines the Treasury class, which aggregates DAO wallets, sets up
+sorting rules, and manages transaction ingestion and streaming analytics.
+It coordinates the end-to-end flow from wallet configuration to database
+population and dashboard analytics.
+
+Key Responsibilities:
+- Aggregate and manage DAO-controlled wallets.
+- Ingest and process on-chain transactions.
+- Apply sorting/categorization rules.
+- Integrate with streaming protocols (e.g., LlamaPay).
+- Populate the database for analytics and dashboards.
+
+This is the main entry point for orchestrating DAO treasury analytics.
+"""
+
+from asyncio import create_task, gather
 from logging import getLogger
 from pathlib import Path
 from typing import Final, Iterable, List, Optional, Union

@@ -9,11 +26,14 @@ from eth_typing import BlockNumber
 from eth_portfolio.structs import LedgerEntry
 from eth_portfolio.typing import PortfolioBalances
 from eth_portfolio_scripts._portfolio import ExportablePortfolio
+from pony.orm import db_session
 from tqdm.asyncio import tqdm_asyncio
 
 from dao_treasury._wallet import TreasuryWallet
+from dao_treasury.constants import CHAINID
 from dao_treasury.db import TreasuryTx
 from dao_treasury.sorting._rules import Rules
+from dao_treasury.streams import llamapay
 
 
 Wallet = Union[TreasuryWallet, str]

@@ -56,20 +76,22 @@ class Treasury(a_sync.ASyncGenericBase):  # type: ignore [misc]
             TypeError: If any item in `wallets` is not a str or TreasuryWallet.
 
         Examples:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            .. code-block:: python
+
+                # Create a synchronous Treasury
+                treasury = Treasury(
+                    wallets=["0xAbc123...", TreasuryWallet("0xDef456...", start_block=1000)],
+                    sort_rules=Path("/path/to/rules"),
+                    start_block=500,
+                    label="DAO Treasury",
+                    asynchronous=False
+                )
+
+                # Create an asynchronous Treasury
+                treasury_async = Treasury(
+                    wallets=["0xAbc123..."],
+                    asynchronous=True
+                )
         """
         global TREASURY
         if TREASURY is not None:

@@ -94,7 +116,11 @@ class Treasury(a_sync.ASyncGenericBase):  # type: ignore [misc]
         self.sort_rules: Final = Rules(sort_rules) if sort_rules else None
 
         self.portfolio: Final = ExportablePortfolio(
-            addresses=(
+            addresses=(
+                wallet.address
+                for wallet in self.wallets
+                if wallet.networks is None or CHAINID in wallet.networks
+            ),
             start_block=start_block,
             label=label,
             load_prices=True,

@@ -102,6 +128,10 @@ class Treasury(a_sync.ASyncGenericBase):  # type: ignore [misc]
         )
         """An eth_portfolio.Portfolio object used for exporting tx and balance history"""
 
+        self._llamapay: Final = (
+            llamapay.LlamaPayProcessor() if CHAINID in llamapay.networks else None
+        )
+
         self.asynchronous: Final = asynchronous
         """A boolean flag indicating whether the API for this `Treasury` object is sync or async by default"""
 

@@ -114,7 +144,7 @@ class Treasury(a_sync.ASyncGenericBase):  # type: ignore [misc]
     def txs(self) -> a_sync.ASyncIterator[LedgerEntry]:
         return self.portfolio.ledger.all_entries
 
-    async def
+    async def _insert_txs(
         self, start_block: BlockNumber, end_block: BlockNumber
     ) -> None:
         """Populate the database with treasury transactions in a block range.

@@ -130,15 +160,32 @@ class Treasury(a_sync.ASyncGenericBase):  # type: ignore [misc]
 
         Examples:
             >>> # Insert transactions from block 0 to 10000
-            >>> await treasury.
+            >>> await treasury._insert_txs(0, 10000)
+        """
+        with db_session:
+            futs = []
+            async for entry in self.portfolio.ledger[start_block:end_block]:
+                if not entry.value:
+                    # TODO: add an arg in eth-port to skip 0 value
+                    logger.debug("zero value transfer, skipping %s", entry)
+                    continue
+                futs.append(create_task(TreasuryTx.insert(entry)))
+            if futs:
+                await tqdm_asyncio.gather(*futs, desc="Insert Txs to Postgres")
+        logger.info(f"{len(futs)} transfers exported")
+
+    async def _process_streams(self) -> None:
+        if self._llamapay is not None:
+            await self._llamapay.process_streams(run_forever=True)
+
+    async def populate_db(
+        self, start_block: BlockNumber, end_block: BlockNumber
+    ) -> None:
+        """
+        Populate the database with treasury transactions and streams in parallel.
         """
-
-
-
-
-
-            continue
-        futs.append(create_task(TreasuryTx.insert(entry)))
-
-        if futs:
-            await tqdm_asyncio.gather(*futs, desc="Insert Txs to Postgres")
+        tasks = [self._insert_txs(start_block, end_block)]
+        if self._llamapay:
+            tasks.append(self._process_streams())
+        await gather(*tasks)
+        logger.info("db connection closed")
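Taken together, these treasury.py changes mean a single `populate_db` call now ingests ledger entries and amortizes LlamaPay streams concurrently. A minimal hypothetical driver is sketched below; the wallet address and block numbers are placeholders, while `Treasury` and `populate_db(start_block, end_block)` come from the diff above.

```python
import asyncio

from dao_treasury.treasury import Treasury


async def main() -> None:
    treasury = Treasury(
        wallets=["0x0000000000000000000000000000000000000002"],  # placeholder wallet
        asynchronous=True,
    )
    # Inserts ledger entries and, where LlamaPay factories exist for the chain,
    # processes streamed funds concurrently, per the new populate_db implementation.
    await treasury.populate_db(start_block=0, end_block=20_000_000)


if __name__ == "__main__":
    asyncio.run(main())
```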
dao_treasury/types.cpython-310-darwin.so
Binary file
dao_treasury-0.0.69.dist-info/METADATA
ADDED
@@ -0,0 +1,120 @@
+Metadata-Version: 2.4
+Name: dao_treasury
+Version: 0.0.69
+Summary: Produce comprehensive financial reports for your on-chain org
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=3.10,<3.13
+Description-Content-Type: text/markdown
+Requires-Dist: eth-portfolio-temp==0.2.17
+Dynamic: classifier
+Dynamic: description
+Dynamic: description-content-type
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
+
+DAO Treasury is a comprehensive financial reporting and treasury management solution designed specifically for decentralized organizations. Built as an extension to [eth-portfolio](https://github.com/BobTheBuidler/eth-portfolio)'s [Portfolio Exporter](https://bobthebuidler.github.io/eth-portfolio/exporter.html), DAO Treasury automates the collection and visualization of financial data, enabling organizations to monitor and report on treasury activities with clarity and transparency.
+
+## Key Features
+
+- **Financial Reporting for DAOs:** Extends core portfolio functionalities to generate detailed reports tailored for on-chain organizations.
+- **Dashboard Provisioning:** Utilizes [Grafana](https://grafana.com/) dashboards—defined in JSON files within the .grafana/provisioning directories—to offer real-time, dynamic visualizations of treasury data.
+- **Automated Data Export:** Features a treasury export tool that, once configured (with a supported [brownie network](https://eth-brownie.readthedocs.io/en/stable/network-management.html) and [Docker](https://www.docker.com/get-started/)), continuously captures financial snapshots at set intervals.
+- **Ease of Contribution:** Non-technical users can easily update or create dashboard visuals using Grafana’s intuitive UI. The [Contributing Guidelines](https://github.com/BobTheBuidler/dao-treasury/blob/master/CONTRIBUTING.md) document provides a step-by-step guide to defining new visuals and dashboards and integrating those changes into the repository, ensuring that anyone can contribute to the visual reporting aspect of the project.
+
+## Requirements
+- Python 3.10 or higher.
+- At least 16GB of RAM.
+- All dependencies installed as specified in the project’s pyproject.toml file.
+
+## Prerequisites
+
+- First, you will need to bring your own archive node. This can be one you run yourself, or one from one of the common providers (Tenderly, Alchemy, QuickNode, etc.). Your archive node must have tracing enabled (free-tier Alchemy nodes do not support this option).
+- You must configure a [brownie network](https://eth-brownie.readthedocs.io/en/stable/network-management.html) to use your RPC.
+- You will need an auth token for [Etherscan](https://etherscan.io/)'s API. Follow their [guide](https://docs.etherscan.io/etherscan-v2/getting-an-api-key) to get your key, and set env var `ETHERSCAN_TOKEN` with its value.
+- You'll also need [Docker](https://www.docker.com/get-started/) installed on your system. If on MacOS, you will need to leave Docker Desktop open while DAO Treasury is running.
+
+## Installation
+
+```bash
+pip install dao-treasury
+```
+
+## Usage
+
+Run the treasury export tool:
+
+```bash
+# For pip installations:
+dao-treasury run --wallet 0x123 --network mainnet --interval 12h
+```
+
+For local development (from source installation), use:
+```bash
+poetry run dao-treasury run --wallet 0x123 --network mainnet --interval 12h
+```
+
+**CLI Options:**
+- `--network`: The id of the brownie network the exporter will connect to (default: mainnet)
+- `--interval`: The time interval between each data snapshot (default: 12h)
+- `--concurrency`: The max number of historical blocks to export concurrently. (default: 30)
+- `--daemon`: Run the export process in the background (default: False) (NOTE: currently unsupported)
+- `--grafana-port`: Set the port for the Grafana dashboard where you can view data (default: 3004)
+- `--renderer-port`: Set the port for the report rendering service (default: 8091)
+- `--victoria-port`: Set the port for the Victoria metrics reporting endpoint (default: 8430)
+- `--start-renderer`: If set, both the Grafana and renderer containers will be started for dashboard image export. By default, only the grafana container is started.
+
+After running the command, the export script will run continuously until you close your terminal.
+To view the dashboards, just open your browser and navigate to [http://localhost:3004](http://localhost:3004)!
+
+## Docker
+
+When you run DAO Treasury, [eth-portfolio](https://github.com/BobTheBuidler/eth-portfolio) will build and start 4 [required Docker containers](https://bobthebuidler.github.io/eth-portfolio/exporter.html#docker-containers) on your system. Additionally, DAO Treasury will build and start 2 more required containers:
+
+- **grafana**
+  - Provides a web-based dashboard for visualizing your treasury data.
+  - Pre-configured with dashboards and plugins for real-time monitoring.
+  - Uses persistent storage to retain dashboard settings and data.
+  - Accessible locally (default port `3004`, configurable via `--grafana-port`).
+  - Supports anonymous access for convenience.
+  - Integrates with the renderer container for dashboard image export.
+  - Loads dashboards and data sources automatically via provisioning files.
+
+- **renderer**
+  - Runs the official Grafana image renderer service.
+  - Enables Grafana to export dashboards as images for reporting or sharing.
+  - Operates on port `8091` by default (configurable via `--renderer-port`).
+  - Tightly integrated with the Grafana container for seamless image rendering.
+  - **Note:** The renderer container is only started if you pass the `--start-renderer` CLI flag.
+
+**How it works:**
+1. DAO Treasury collects and exports treasury data.
+2. Grafana displays this data in pre-built dashboards for analysis and reporting.
+3. The renderer container allows dashboards to be exported as images directly from Grafana (if enabled).
+
+**Additional Information:**
+- All containers are orchestrated via Docker Compose and started automatically as needed.
+- Grafana provisioning ensures dashboards and data sources are set up out-of-the-box.
+- All dashboard data and settings are persisted for durability.
+- Dashboard images can be generated for reporting via the renderer (if enabled).
+
+## Screenshots
+
+#### [DAO Transactions Dashboard](https://bobthebuidler.github.io/dao-treasury/transactions.html)
+
+
+
+## Contributing
+
+We welcome contributions to DAO Treasury! For detailed guidelines on how to contribute, please see the [Contributing Guidelines](https://github.com/BobTheBuidler/dao-treasury/blob/master/CONTRIBUTING.md).
+
+Enjoy!