dao-treasury 0.0.21__cp311-cp311-win_amd64.whl → 0.0.71__cp311-cp311-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dao-treasury might be problematic.
- dao_treasury/.grafana/provisioning/dashboards/breakdowns/Expenses.json +551 -0
- dao_treasury/.grafana/provisioning/dashboards/breakdowns/Revenue.json +551 -0
- dao_treasury/.grafana/provisioning/dashboards/dashboards.yaml +7 -7
- dao_treasury/.grafana/provisioning/dashboards/streams/LlamaPay.json +220 -0
- dao_treasury/.grafana/provisioning/dashboards/summary/Monthly.json +286 -29
- dao_treasury/.grafana/provisioning/dashboards/transactions/Treasury Transactions.json +217 -84
- dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow (Including Unsorted).json +808 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow.json +602 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Current Treasury Assets.json +981 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Historical Treasury Balances.json +3060 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Operating Cashflow.json +478 -0
- dao_treasury/.grafana/provisioning/datasources/datasources.yaml +17 -0
- dao_treasury/ENVIRONMENT_VARIABLES.py +20 -0
- dao_treasury/__init__.py +20 -0
- dao_treasury/_docker.cp311-win_amd64.pyd +0 -0
- dao_treasury/_docker.py +67 -38
- dao_treasury/_nicknames.cp311-win_amd64.pyd +0 -0
- dao_treasury/_nicknames.py +24 -2
- dao_treasury/_wallet.cp311-win_amd64.pyd +0 -0
- dao_treasury/_wallet.py +157 -16
- dao_treasury/constants.cp311-win_amd64.pyd +0 -0
- dao_treasury/constants.py +39 -0
- dao_treasury/db.py +384 -45
- dao_treasury/docker-compose.yaml +6 -5
- dao_treasury/main.py +118 -17
- dao_treasury/sorting/__init__.cp311-win_amd64.pyd +0 -0
- dao_treasury/sorting/__init__.py +171 -42
- dao_treasury/sorting/_matchers.cp311-win_amd64.pyd +0 -0
- dao_treasury/sorting/_rules.cp311-win_amd64.pyd +0 -0
- dao_treasury/sorting/_rules.py +1 -3
- dao_treasury/sorting/factory.cp311-win_amd64.pyd +0 -0
- dao_treasury/sorting/factory.py +2 -6
- dao_treasury/sorting/rule.cp311-win_amd64.pyd +0 -0
- dao_treasury/sorting/rule.py +13 -10
- dao_treasury/sorting/rules/__init__.cp311-win_amd64.pyd +0 -0
- dao_treasury/sorting/rules/__init__.py +1 -0
- dao_treasury/sorting/rules/ignore/__init__.cp311-win_amd64.pyd +0 -0
- dao_treasury/sorting/rules/ignore/__init__.py +1 -0
- dao_treasury/sorting/rules/ignore/llamapay.cp311-win_amd64.pyd +0 -0
- dao_treasury/sorting/rules/ignore/llamapay.py +20 -0
- dao_treasury/streams/__init__.cp311-win_amd64.pyd +0 -0
- dao_treasury/streams/__init__.py +0 -0
- dao_treasury/streams/llamapay.cp311-win_amd64.pyd +0 -0
- dao_treasury/streams/llamapay.py +388 -0
- dao_treasury/treasury.py +79 -30
- dao_treasury/types.cp311-win_amd64.pyd +0 -0
- dao_treasury-0.0.71.dist-info/METADATA +134 -0
- dao_treasury-0.0.71.dist-info/RECORD +54 -0
- dao_treasury-0.0.71.dist-info/top_level.txt +2 -0
- dao_treasury__mypyc.cp311-win_amd64.pyd +0 -0
- 52b51d40e96d4333695d__mypyc.cp311-win_amd64.pyd +0 -0
- dao_treasury/.grafana/provisioning/datasources/sqlite.yaml +0 -10
- dao_treasury-0.0.21.dist-info/METADATA +0 -63
- dao_treasury-0.0.21.dist-info/RECORD +0 -31
- dao_treasury-0.0.21.dist-info/top_level.txt +0 -2
- {dao_treasury-0.0.21.dist-info → dao_treasury-0.0.71.dist-info}/WHEEL +0 -0
dao_treasury/sorting/rule.py
CHANGED
@@ -31,6 +31,7 @@ See Also:
 
 from collections import defaultdict
 from dataclasses import dataclass
+from logging import getLogger
 from typing import (
     TYPE_CHECKING,
     DefaultDict,
@@ -53,6 +54,9 @@ if TYPE_CHECKING:
     from dao_treasury.db import TreasuryTx
 
 
+logger: Final = getLogger(__name__)
+_log_debug: Final = logger.debug
+
 SORT_RULES: DefaultDict[Type[SortRule], List[SortRule]] = defaultdict(list)
 """Mapping from sort rule classes to lists of instantiated rules, in creation order per class.
 
@@ -126,8 +130,6 @@ class _SortRule:
     func: Optional[SortFunction] = None
     """Custom matching function that takes a `TreasuryTx` and returns a bool or an awaitable that returns a bool."""
 
-    # __instances__: ClassVar[List[Self]] = []
-
     def __post_init__(self) -> None:
         """Validate inputs, checksum addresses, and register the rule.
 
@@ -214,6 +216,7 @@ class _SortRule:
             getattr(tx, matcher) == getattr(self, matcher) for matcher in matchers
         )
 
+        _log_debug("checking %s for %s", tx, self.func)
         match = self.func(tx)  # type: ignore [misc]
         return match if isinstance(match, bool) else await match
 
@@ -230,7 +233,7 @@ class _InboundSortRule(_SortRule):
         return (
             tx.to_address is not None
             and TreasuryWallet.check_membership(tx.to_address.address, tx.block)
-            and await super().match(tx)
+            and await super(_InboundSortRule, self).match(tx)
         )
 
 
@@ -245,7 +248,7 @@ class _OutboundSortRule(_SortRule):
     async def match(self, tx: "TreasuryTx") -> bool:
         return TreasuryWallet.check_membership(
             tx.from_address.address, tx.block
-        ) and await super().match(tx)
+        ) and await super(_OutboundSortRule, self).match(tx)
 
 
 @mypyc_attr(native_class=False)
@@ -262,7 +265,7 @@ class RevenueSortRule(_InboundSortRule):
     def __post_init__(self) -> None:
         """Prepends `self.txgroup` with 'Revenue:'."""
         object.__setattr__(self, "txgroup", f"Revenue:{self.txgroup}")
-        super().__post_init__()
+        super(RevenueSortRule, self).__post_init__()
 
 
 @mypyc_attr(native_class=False)
@@ -275,7 +278,7 @@ class CostOfRevenueSortRule(_OutboundSortRule):
     def __post_init__(self) -> None:
         """Prepends `self.txgroup` with 'Cost of Revenue:'."""
         object.__setattr__(self, "txgroup", f"Cost of Revenue:{self.txgroup}")
-        super().__post_init__()
+        super(CostOfRevenueSortRule, self).__post_init__()
 
 
 @mypyc_attr(native_class=False)
@@ -288,7 +291,7 @@ class ExpenseSortRule(_OutboundSortRule):
     def __post_init__(self) -> None:
         """Prepends `self.txgroup` with 'Expenses:'."""
         object.__setattr__(self, "txgroup", f"Expenses:{self.txgroup}")
-        super().__post_init__()
+        super(ExpenseSortRule, self).__post_init__()
 
 
 @mypyc_attr(native_class=False)
@@ -301,7 +304,7 @@ class OtherIncomeSortRule(_InboundSortRule):
     def __post_init__(self) -> None:
         """Prepends `self.txgroup` with 'Other Income:'."""
         object.__setattr__(self, "txgroup", f"Other Income:{self.txgroup}")
-        super().__post_init__()
+        super(OtherIncomeSortRule, self).__post_init__()
 
 
 @mypyc_attr(native_class=False)
@@ -314,7 +317,7 @@ class OtherExpenseSortRule(_OutboundSortRule):
     def __post_init__(self) -> None:
         """Prepends `self.txgroup` with 'Other Expenses:'."""
        object.__setattr__(self, "txgroup", f"Other Expenses:{self.txgroup}")
-        super().__post_init__()
+        super(OtherExpenseSortRule, self).__post_init__()
 
 
 @mypyc_attr(native_class=False)
@@ -327,7 +330,7 @@ class IgnoreSortRule(_SortRule):
     def __post_init__(self) -> None:
         """Prepends `self.txgroup` with 'Ignore:'."""
         object.__setattr__(self, "txgroup", f"Ignore:{self.txgroup}")
-        super().__post_init__()
+        super(IgnoreSortRule, self).__post_init__()
 
 
 TRule = TypeVar(
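Note on the pattern above: every zero-argument `super()` call in `rule.py` is rewritten to the explicit two-argument form (`super(RevenueSortRule, self)` and friends), and a module-level `logger`/`_log_debug` pair is introduced. The diff itself does not say why, but a plausible reason is the mypyc-compiled build (the wheel now ships `rule.cp311-win_amd64.pyd`): zero-argument `super()` depends on the implicit `__class__` cell, which compiled classes do not always provide. A minimal sketch of the two-argument pattern outside this package, with hypothetical class names:

from dataclasses import dataclass


@dataclass(frozen=True)
class BaseRule:
    txgroup: str

    def __post_init__(self) -> None:
        # Frozen dataclasses forbid normal attribute assignment, so
        # object.__setattr__ is used, mirroring the diff above.
        object.__setattr__(self, "txgroup", self.txgroup.strip())


@dataclass(frozen=True)
class RevenueRule(BaseRule):
    def __post_init__(self) -> None:
        # Prepend a category prefix, then delegate to the parent initializer.
        object.__setattr__(self, "txgroup", f"Revenue:{self.txgroup}")
        # Explicit two-argument super(): behaves like super().__post_init__()
        # in plain CPython, but avoids relying on the implicit __class__ cell.
        super(RevenueRule, self).__post_init__()


print(RevenueRule("Fees").txgroup)  # -> "Revenue:Fees"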
dao_treasury/sorting/rules/__init__.cp311-win_amd64.pyd
Binary file

dao_treasury/sorting/rules/__init__.py
ADDED
@@ -0,0 +1 @@
+from dao_treasury.sorting.rules.ignore import *
dao_treasury/sorting/rules/ignore/__init__.cp311-win_amd64.pyd
Binary file

dao_treasury/sorting/rules/ignore/__init__.py
ADDED
@@ -0,0 +1 @@
+from dao_treasury.sorting.rules.ignore.llamapay import *
dao_treasury/sorting/rules/ignore/llamapay.cp311-win_amd64.pyd
Binary file

dao_treasury/sorting/rules/ignore/llamapay.py
ADDED
@@ -0,0 +1,20 @@
+from dao_treasury import TreasuryTx
+from dao_treasury.sorting.factory import ignore
+from dao_treasury.streams import llamapay
+
+
+@ignore("LlamaPay")
+def is_llamapay_stream_replenishment(tx: TreasuryTx) -> bool:
+    if tx.to_address.address in llamapay.factories:  # type: ignore [operator]
+        # We amortize these streams daily in the `llamapay` module; each stream is sorted appropriately there.
+        return True
+
+    # NOTE: not sure if we want this yet
+    # Pulling unused funds back from vesting escrow / llamapay
+    # elif tx.from_address == "Contract: LlamaPay" and "StreamCancelled" in tx.events:
+    #     if tx.amount > 0:
+    #         tx.amount *= -1
+    #     if tx.value_usd > 0:
+    #         tx.value_usd *= -1
+    #     return True
+    return False
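The rule above illustrates the decorator-based registration exposed by `dao_treasury.sorting.factory`: `@ignore(<label>)` wraps a `TreasuryTx -> bool` matcher and files matching transactions under `Ignore:<label>`. A sketch of the same pattern for a different case follows; the `ROUTERS` name and the addresses are illustrative only, not part of the package:

from dao_treasury import TreasuryTx
from dao_treasury.sorting.factory import ignore

# Hypothetical contract addresses whose transfers should be ignored.
ROUTERS = {
    "0x1111111111111111111111111111111111111111",
    "0x2222222222222222222222222222222222222222",
}


@ignore("Internal Routing")
def is_internal_routing(tx: TreasuryTx) -> bool:
    # Returning True files the transaction under "Ignore:Internal Routing";
    # returning False leaves it for other sort rules, as in the LlamaPay rule.
    return tx.to_address is not None and tx.to_address.address in ROUTERS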
dao_treasury/streams/__init__.cp311-win_amd64.pyd
Binary file

dao_treasury/streams/__init__.py
File without changes

dao_treasury/streams/llamapay.cp311-win_amd64.pyd
Binary file
dao_treasury/streams/llamapay.py
ADDED
@@ -0,0 +1,388 @@
+import asyncio
+import datetime as dt
+import decimal
+from logging import getLogger
+from typing import (
+    Awaitable,
+    Callable,
+    Dict,
+    Final,
+    Iterator,
+    List,
+    Optional,
+    Set,
+    final,
+)
+
+import dank_mids
+import pony.orm
+from a_sync import AsyncThreadPoolExecutor, igather
+from brownie.network.event import _EventItem
+from eth_typing import BlockNumber, ChecksumAddress, HexAddress, HexStr
+from tqdm.asyncio import tqdm_asyncio
+
+import y
+from y.time import NoBlockFound, UnixTimestamp
+from y.utils.events import decode_logs, get_logs_asap
+
+from dao_treasury import constants
+from dao_treasury.db import (
+    Stream,
+    StreamedFunds,
+    Address,
+    Token,
+    must_sort_outbound_txgroup_dbid,
+)
+from dao_treasury._wallet import TreasuryWallet
+
+
+logger: Final = getLogger(__name__)
+
+_UTC: Final = dt.timezone.utc
+
+_ONE_DAY: Final = 60 * 60 * 24
+
+_STREAMS_THREAD: Final = AsyncThreadPoolExecutor(1)
+
+create_task: Final = asyncio.create_task
+sleep: Final = asyncio.sleep
+
+datetime: Final = dt.datetime
+timedelta: Final = dt.timedelta
+fromtimestamp: Final = datetime.fromtimestamp
+now: Final = datetime.now
+
+Decimal: Final = decimal.Decimal
+
+ObjectNotFound: Final = pony.orm.ObjectNotFound
+commit: Final = pony.orm.commit
+db_session: Final = pony.orm.db_session
+
+Contract: Final = y.Contract
+Network: Final = y.Network
+get_block_at_timestamp: Final = y.get_block_at_timestamp
+get_price: Final = y.get_price
+
+
+networks: Final = [Network.Mainnet]
+
+factories: List[HexAddress] = []
+
+if dai_stream_factory := {
+    Network.Mainnet: "0x60c7B0c5B3a4Dc8C690b074727a17fF7aA287Ff2",
+}.get(constants.CHAINID):
+    factories.append(dai_stream_factory)
+
+if yfi_stream_factory := {
+    Network.Mainnet: "0xf3764eC89B1ad20A31ed633b1466363FAc1741c4",
+}.get(constants.CHAINID):
+    factories.append(yfi_stream_factory)
+
+
+def _generate_dates(
+    start: dt.datetime, end: dt.datetime, stop_at_today: bool = True
+) -> Iterator[dt.datetime]:
+    current = start
+    while current < end:
+        yield current
+        current += timedelta(days=1)
+        if stop_at_today and current.date() > now(_UTC).date():
+            break
+
+
+_StreamToStart = Callable[[HexStr, Optional[BlockNumber]], Awaitable[int]]
+
+_streamToStart_cache: Final[Dict[HexStr, _StreamToStart]] = {}
+
+
+def _get_streamToStart(stream_id: HexStr) -> _StreamToStart:
+    if streamToStart := _streamToStart_cache.get(stream_id):
+        return streamToStart
+    with db_session:
+        contract: y.Contract = Stream[stream_id].contract.contract  # type: ignore [misc]
+        streamToStart = contract.streamToStart.coroutine
+        _streamToStart_cache[stream_id] = streamToStart
+        return streamToStart
+
+
+async def _get_start_timestamp(
+    stream_id: HexStr, block: Optional[BlockNumber] = None
+) -> int:
+    streamToStart = _streamToStart_cache.get(stream_id)
+    if streamToStart is None:
+        streamToStart = await _STREAMS_THREAD.run(_get_streamToStart, stream_id)
+    # try:
+    return int(await streamToStart(f"0x{stream_id}", block_identifier=block))  # type: ignore [call-arg]
+    # except Exception:
+    #     return 0
+
+
+def _pause_stream(stream_id: HexStr) -> None:
+    with db_session:
+        Stream[stream_id].pause()  # type: ignore [misc]
+
+
+def _stop_stream(stream_id: str, block: BlockNumber) -> None:
+    with db_session:
+        Stream[stream_id].stop_stream(block)  # type: ignore [misc]
+
+
+_block_timestamps: Final[Dict[BlockNumber, UnixTimestamp]] = {}
+
+
+async def _get_block_timestamp(block: BlockNumber) -> UnixTimestamp:
+    if timestamp := _block_timestamps.get(block):
+        return timestamp
+    timestamp = await dank_mids.eth.get_block_timestamp(block)
+    _block_timestamps[block] = timestamp
+    return timestamp
+
+
+"""
+class _StreamProcessor(ABC):
+    @abstractmethod
+    async def _load_streams(self) -> None:
+        ...
+"""
+
+
+@final
+class LlamaPayProcessor:
+    """
+    Generalized async processor for DAO stream contracts.
+    Args are passed in at construction time.
+    Supports time-bounded admin periods for filtering.
+    """
+
+    handled_events: Final = (
+        "StreamCreated",
+        "StreamCreatedWithReason",
+        "StreamModified",
+        "StreamPaused",
+        "StreamCancelled",
+    )
+    skipped_events: Final = (
+        "PayerDeposit",
+        "PayerWithdraw",
+        "Withdraw",
+    )
+
+    def __init__(self) -> None:
+        self.stream_contracts: Final = {Contract(addr) for addr in factories}
+
+    async def _get_streams(self) -> None:
+        await igather(
+            self._load_contract_events(stream_contract)
+            for stream_contract in self.stream_contracts
+        )
+
+    async def _load_contract_events(self, stream_contract: y.Contract) -> None:
+        events = decode_logs(
+            await get_logs_asap(stream_contract.address, None, sync=False)
+        )
+        keys: Set[str] = set(events.keys())
+        for k in keys:
+            if k not in self.handled_events and k not in self.skipped_events:
+                raise NotImplementedError(f"Need to handle event: {k}")
+
+        if "StreamCreated" in keys:
+            for event in events["StreamCreated"]:
+                from_address, *_ = event.values()
+                from_address = Address.get_or_insert(from_address).address
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(self._get_stream, event)
+
+        if "StreamCreatedWithReason" in keys:
+            for event in events["StreamCreatedWithReason"]:
+                from_address, *_ = event.values()
+                from_address = Address.get_or_insert(from_address).address
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(self._get_stream, event)
+
+        if "StreamModified" in keys:
+            for event in events["StreamModified"]:
+                from_address, _, _, old_stream_id, *_ = event.values()
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(
+                    _stop_stream, old_stream_id.hex(), event.block_number
+                )
+                await _STREAMS_THREAD.run(self._get_stream, event)
+
+        if "StreamPaused" in keys:
+            for event in events["StreamPaused"]:
+                from_address, *_, stream_id = event.values()
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(_pause_stream, stream_id.hex())
+
+        if "StreamCancelled" in keys:
+            for event in events["StreamCancelled"]:
+                from_address, *_, stream_id = event.values()
+                if not TreasuryWallet.check_membership(
+                    from_address, event.block_number
+                ):
+                    continue
+                await _STREAMS_THREAD.run(
+                    _stop_stream, stream_id.hex(), event.block_number
+                )
+
+    def _get_stream(self, log: _EventItem) -> Stream:
+        with db_session:
+            if log.name == "StreamCreated":
+                from_address, to_address, amount_per_second, stream_id = log.values()
+                reason = None
+            elif log.name == "StreamCreatedWithReason":
+                from_address, to_address, amount_per_second, stream_id, reason = (
+                    log.values()
+                )
+            elif log.name == "StreamModified":
+                (
+                    from_address,
+                    _,
+                    _,
+                    old_stream_id,
+                    to_address,
+                    amount_per_second,
+                    stream_id,
+                ) = log.values()
+                reason = Stream[old_stream_id.hex()].reason  # type: ignore [misc]
+            else:
+                raise NotImplementedError("This is not an appropriate event log.")
+
+            stream_id_hex = stream_id.hex()
+            try:
+                return Stream[stream_id_hex]  # type: ignore [misc]
+            except ObjectNotFound:
+                entity = Stream(
+                    stream_id=stream_id_hex,
+                    contract=Address.get_dbid(log.address),
+                    start_block=log.block_number,
+                    token=Token.get_dbid(Contract(log.address).token()),
+                    from_address=Address.get_dbid(from_address),
+                    to_address=Address.get_dbid(to_address),
+                    amount_per_second=amount_per_second,
+                    txgroup=must_sort_outbound_txgroup_dbid,
+                )
+                if reason is not None:
+                    entity.reason = reason
+                commit()
+                return entity
+
+    def streams_for_recipient(
+        self, recipient: ChecksumAddress, at_block: Optional[BlockNumber] = None
+    ) -> List[Stream]:
+        with db_session:
+            streams = Stream.select(lambda s: s.to_address.address == recipient)
+            if at_block is None:
+                return list(streams)
+            return [
+                s for s in streams if (s.end_block is None or at_block <= s.end_block)
+            ]
+
+    def streams_for_token(
+        self, token: ChecksumAddress, include_inactive: bool = False
+    ) -> List[Stream]:
+        with db_session:
+            streams = Stream.select(lambda s: s.token.address.address == token)
+            return (
+                list(streams)
+                if include_inactive
+                else [s for s in streams if s.is_alive]
+            )
+
+    async def process_streams(self, run_forever: bool = False) -> None:
+        logger.info("Processing stream events and streamed funds...")
+        # Always sync events before processing
+        await self._get_streams()
+        with db_session:
+            streams = [s.stream_id for s in Stream.select()]
+        await tqdm_asyncio.gather(
+            *(
+                self.process_stream(stream_id, run_forever=run_forever)
+                for stream_id in streams
+            ),
+            desc="LlamaPay Streams",
+        )
+
+    async def process_stream(
+        self, stream_id: HexStr, run_forever: bool = False
+    ) -> None:
+        start, end = await _STREAMS_THREAD.run(Stream._get_start_and_end, stream_id)
+        for date_obj in _generate_dates(start, end, stop_at_today=not run_forever):
+            if await self.process_stream_for_date(stream_id, date_obj) is None:
+                return
+
+    async def process_stream_for_date(
+        self, stream_id: HexStr, date_obj: dt.datetime
+    ) -> Optional[StreamedFunds]:
+        entity = await _STREAMS_THREAD.run(
+            StreamedFunds.get_entity, stream_id, date_obj
+        )
+        if entity:
+            return entity
+
+        stream_token, start_date = await _STREAMS_THREAD.run(
+            Stream._get_token_and_start_date, stream_id
+        )
+        check_at = date_obj + timedelta(days=1) - timedelta(seconds=1)
+        if check_at > now(tz=_UTC):
+            await sleep((check_at - now(tz=_UTC)).total_seconds())
+
+        while True:
+            try:
+                block = await get_block_at_timestamp(check_at, sync=False)
+            except NoBlockFound:
+                sleep_time = (check_at - now(tz=_UTC)).total_seconds()
+                logger.debug(
+                    "no block found for %s, sleeping %ss", check_at, sleep_time
+                )
+                await sleep(sleep_time)
+            else:
+                break
+
+        price_fut = create_task(get_price(stream_token, block, sync=False))
+        start_timestamp = await _get_start_timestamp(stream_id, block)
+        if start_timestamp == 0:
+            if await _STREAMS_THREAD.run(Stream.check_closed, stream_id):
+                price_fut.cancel()
+                return None
+
+            while start_timestamp == 0:
+                block -= 1
+                start_timestamp = await _get_start_timestamp(stream_id, block)
+
+            block_datetime = fromtimestamp(await _get_block_timestamp(block), tz=_UTC)
+            assert block_datetime.date() == date_obj.date()
+            seconds_active = (check_at - block_datetime).seconds
+            is_last_day = True
+        else:
+            seconds_active = int(check_at.timestamp()) - start_timestamp
+            is_last_day = False
+
+        seconds_active_today = min(seconds_active, _ONE_DAY)
+        if seconds_active_today < _ONE_DAY and not is_last_day:
+            if date_obj.date() != start_date:
+                seconds_active_today = _ONE_DAY
+
+        with db_session:
+            price = Decimal(await price_fut)
+            entity = await _STREAMS_THREAD.run(
+                StreamedFunds.create_entity,
+                stream_id,
+                date_obj,
+                price,
+                seconds_active_today,
+                is_last_day,
+            )
+            return entity
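Taken together, `streams/llamapay.py` syncs LlamaPay stream events into the `Stream` table and then amortizes each stream day by day into `StreamedFunds` rows via `LlamaPayProcessor.process_streams`. A rough usage sketch, assuming the usual dao-treasury setup (configured brownie/dank_mids connection and database) is already in place; the recipient address is a placeholder:

import asyncio

from dao_treasury.streams.llamapay import LlamaPayProcessor


async def main() -> None:
    processor = LlamaPayProcessor()

    # One-shot backfill: amortize every known stream up to today, then stop.
    await processor.process_streams(run_forever=False)

    # Query helpers defined on the processor: streams by recipient or token.
    streams = processor.streams_for_recipient(
        "0x0000000000000000000000000000000000000000"  # placeholder recipient
    )
    print(f"{len(streams)} streams found for recipient")


if __name__ == "__main__":
    asyncio.run(main())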