dao-treasury 0.0.22-cp310-cp310-macosx_11_0_arm64.whl → 0.0.69-cp310-cp310-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dao_treasury/.grafana/provisioning/dashboards/breakdowns/Expenses.json +551 -0
- dao_treasury/.grafana/provisioning/dashboards/breakdowns/Revenue.json +551 -0
- dao_treasury/.grafana/provisioning/dashboards/dashboards.yaml +7 -7
- dao_treasury/.grafana/provisioning/dashboards/streams/LlamaPay.json +220 -0
- dao_treasury/.grafana/provisioning/dashboards/summary/Monthly.json +18 -23
- dao_treasury/.grafana/provisioning/dashboards/transactions/Treasury Transactions.json +181 -29
- dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow (Including Unsorted).json +808 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow.json +602 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Current Treasury Assets.json +1009 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Historical Treasury Balances.json +2989 -0
- dao_treasury/.grafana/provisioning/dashboards/treasury/Operating Cashflow.json +478 -0
- dao_treasury/.grafana/provisioning/datasources/datasources.yaml +17 -0
- dao_treasury/ENVIRONMENT_VARIABLES.py +20 -0
- dao_treasury/__init__.py +20 -0
- dao_treasury/_docker.cpython-310-darwin.so +0 -0
- dao_treasury/_docker.py +67 -38
- dao_treasury/_nicknames.cpython-310-darwin.so +0 -0
- dao_treasury/_nicknames.py +24 -2
- dao_treasury/_wallet.cpython-310-darwin.so +0 -0
- dao_treasury/_wallet.py +157 -16
- dao_treasury/constants.cpython-310-darwin.so +0 -0
- dao_treasury/constants.py +39 -0
- dao_treasury/db.py +384 -45
- dao_treasury/docker-compose.yaml +6 -5
- dao_treasury/main.py +86 -17
- dao_treasury/sorting/__init__.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/__init__.py +171 -42
- dao_treasury/sorting/_matchers.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/_rules.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/_rules.py +1 -3
- dao_treasury/sorting/factory.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/factory.py +2 -6
- dao_treasury/sorting/rule.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/rule.py +13 -10
- dao_treasury/sorting/rules/__init__.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/rules/__init__.py +1 -0
- dao_treasury/sorting/rules/ignore/__init__.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/rules/ignore/__init__.py +1 -0
- dao_treasury/sorting/rules/ignore/llamapay.cpython-310-darwin.so +0 -0
- dao_treasury/sorting/rules/ignore/llamapay.py +20 -0
- dao_treasury/streams/__init__.cpython-310-darwin.so +0 -0
- dao_treasury/streams/__init__.py +0 -0
- dao_treasury/streams/llamapay.cpython-310-darwin.so +0 -0
- dao_treasury/streams/llamapay.py +388 -0
- dao_treasury/treasury.py +75 -28
- dao_treasury/types.cpython-310-darwin.so +0 -0
- dao_treasury-0.0.69.dist-info/METADATA +120 -0
- dao_treasury-0.0.69.dist-info/RECORD +54 -0
- dao_treasury-0.0.69.dist-info/top_level.txt +2 -0
- dao_treasury__mypyc.cpython-310-darwin.so +0 -0
- 52b51d40e96d4333695d__mypyc.cpython-310-darwin.so +0 -0
- dao_treasury/.grafana/provisioning/datasources/sqlite.yaml +0 -10
- dao_treasury-0.0.22.dist-info/METADATA +0 -63
- dao_treasury-0.0.22.dist-info/RECORD +0 -31
- dao_treasury-0.0.22.dist-info/top_level.txt +0 -2
- {dao_treasury-0.0.22.dist-info → dao_treasury-0.0.69.dist-info}/WHEEL +0 -0
dao_treasury/_docker.py
CHANGED
@@ -1,42 +1,65 @@
+"""Docker orchestration utilities for DAO Treasury.
+
+Provides functions to build, start, and stop Docker Compose services
+required for analytics dashboards (Grafana, renderer). Integrates with
+eth-portfolio's Docker setup and ensures all containers are managed
+consistently for local analytics.
+
+Key Responsibilities:
+- Build and manage Grafana and renderer containers.
+- Integrate with eth-portfolio Docker services.
+- Provide decorators/utilities for container lifecycle management.
+
+This is the main entry for all Docker-based orchestration.
+"""
+
 import logging
-import os
-import subprocess
 from functools import wraps
-from
+from importlib import resources
+from typing import Any, Callable, Coroutine, Final, Literal, Tuple, TypeVar, List

-
+import eth_portfolio_scripts.docker
+from eth_portfolio_scripts.docker import docker_compose
 from typing_extensions import ParamSpec

-logger = logging.getLogger(__name__)
+logger: Final = logging.getLogger(__name__)

-
-
+COMPOSE_FILE: Final = str(
+    resources.files("dao_treasury").joinpath("docker-compose.yaml")
 )
+"""The path of dao-treasury's docker-compose.yaml file on your machine"""


-def up() -> None:
-    """Build and start
+def up(*services: str) -> None:
+    """Build and start the specified containers defined in the compose file.
+
+    Args:
+        services: service names to bring up.

     This function first builds the Docker services by invoking
-    :func:`build` and then starts
+    :func:`build` and then starts the specified services in detached mode using
     Docker Compose. If Docker Compose is not available, it falls back
     to the legacy ``docker-compose`` command.

     Examples:
+        >>> up('grafana')
+        starting the grafana container
         >>> up()
-        starting
+        starting all containers (grafana and renderer)

     See Also:
         :func:`build`
         :func:`down`
         :func:`_exec_command`
     """
-
-
-
+    # eth-portfolio containers must be started first so dao-treasury can attach to the eth-portfolio docker network
+    eth_portfolio_scripts.docker.up("victoria-metrics")
+    build(*services)
+    _print_notice("starting", services)
+    _exec_command(["up", "-d", *services])


-def down(
+def down() -> None:
     """Stop and remove Grafana containers.

     This function brings down the Docker Compose services defined
@@ -49,10 +72,11 @@ def down(*_) -> None:
     See Also:
         :func:`up`
     """
+    print("stopping all dao-treasury containers")
     _exec_command(["down"])


-def build() -> None:
+def build(*services: str) -> None:
     """Build Docker images for Grafana containers.

     This function builds all services defined in the Docker Compose
@@ -67,15 +91,31 @@ def build() -> None:
         :func:`up`
         :func:`_exec_command`
     """
-
-    _exec_command(["build"])
+    _print_notice("building", services)
+    _exec_command(["build", *services])
+
+
+def _print_notice(
+    doing: Literal["building", "starting"], services: Tuple[str, ...]
+) -> None:
+    if len(services) == 1:
+        container = services[0]
+        print(f"{doing} the {container} container")
+    elif len(services) == 2:
+        first, second = services
+        print(f"{doing} the {first} and {second} containers")
+    else:
+        *all_but_last, last = services
+        print(f"{doing} the {', '.join(all_but_last)}, and {last} containers")


 _P = ParamSpec("_P")
 _T = TypeVar("_T")


-def ensure_containers(
+def ensure_containers(
+    fn: Callable[_P, Coroutine[Any, Any, _T]],
+) -> Callable[_P, Coroutine[Any, Any, _T]]:
     """Decorator to ensure Grafana containers are running before execution.

     This async decorator starts the Docker Compose services via
@@ -104,27 +144,27 @@ def ensure_containers(fn: Callable[_P, _T]) -> Callable[_P, _T]:
     """

     @wraps(fn)
-    async def compose_wrap(*args: _P.args, **kwargs: _P.kwargs):
+    async def compose_wrap(*args: _P.args, **kwargs: _P.kwargs) -> _T:
         # register shutdown sequence
         # TODO: argument to leave them up
         # NOTE: do we need both this and the finally?
         # signal.signal(signal.SIGINT, down)

         # start Grafana containers
-        up()
+        up("grafana")

         try:
             # attempt to run `fn`
-            await fn(*args, **kwargs)
+            return await fn(*args, **kwargs)
         finally:
             # stop and remove containers
             # down()
-
+            pass

     return compose_wrap


-def _exec_command(command:
+def _exec_command(command: List[str], *, compose_options: Tuple[str, ...] = ()) -> None:
     """Execute a Docker Compose command with system checks and fallback.

     This internal function ensures that Docker and Docker Compose
@@ -149,17 +189,6 @@ def _exec_command(command: Iterable[str], *, compose_options: Tuple[str] = ()) -
     See Also:
         :func:`check_system`
     """
-
-
-
-            ["docker", "compose", *compose_options, "-f", compose_file, *command]
-        )
-    except (subprocess.CalledProcessError, FileNotFoundError) as e:
-        try:
-            subprocess.check_output(
-                ["docker-compose", *compose_options, "-f", compose_file, *command]
-            )
-        except (subprocess.CalledProcessError, FileNotFoundError) as _e:
-            raise RuntimeError(
-                f"Error occurred while running {' '.join(command)}: {_e}"
-            ) from _e
+    docker_compose._exec_command(
+        command, compose_file=COMPOSE_FILE, compose_options=compose_options
+    )
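
To make the new lifecycle concrete, here is a minimal sketch of wrapping an async entrypoint with the `ensure_containers` decorator shown above; the `export_report` coroutine and its body are hypothetical, not part of the package:

```python
import asyncio

from dao_treasury import _docker


@_docker.ensure_containers
async def export_report() -> str:
    # Hypothetical workload. By the time this body runs, the decorator has
    # already called up("grafana"), which starts eth-portfolio's
    # victoria-metrics container, builds the images, and brings Grafana up.
    return "done"


async def main() -> None:
    print(await export_report())
    # The decorator currently leaves containers running (its down() call is
    # commented out), so stop them explicitly when finished.
    _docker.down()


if __name__ == "__main__":
    asyncio.run(main())
```
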
dao_treasury/_nicknames.py
CHANGED
@@ -1,10 +1,32 @@
-
+"""Address nickname setup utilities.
+
+This module provides functions to assign human-readable nicknames to
+important on-chain addresses (e.g., Zero Address, Disperse.app, tokens).
+It is used at package initialization to ensure all analytics and dashboards
+display professional, consistent labels.
+
+Key Responsibilities:
+- Set nicknames for core addresses in the database.
+- Integrate with constants and token metadata.
+- Support professional, readable analytics outputs.
+
+This is called automatically on package import.
+"""
+
+from typing import Final
+
 from pony.orm import db_session

+from dao_treasury import constants
 from dao_treasury.db import Address, _set_address_nicknames_for_tokens


+set_nickname: Final = Address.set_nickname
+
+
 def setup_address_nicknames_in_db() -> None:
     with db_session:
-
+        set_nickname(constants.ZERO_ADDRESS, "Zero Address")
+        for address in constants.DISPERSE_APP:
+            set_nickname(address, "Disperse.app")
         _set_address_nicknames_for_tokens()
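
Since the module now re-exports `set_nickname` (an alias for `Address.set_nickname`), a downstream treasury can register its own labels the same way `setup_address_nicknames_in_db` does. A hedged sketch with placeholder addresses and labels:

```python
from pony.orm import db_session

from dao_treasury._nicknames import set_nickname

# Placeholder addresses and labels; the call signature mirrors the calls made
# in setup_address_nicknames_in_db() above.
MY_LABELS = {
    "0x0000000000000000000000000000000000000001": "Ops Multisig",
    "0x0000000000000000000000000000000000000002": "Grants Multisig",
}


def label_my_wallets() -> None:
    # Nicknames are written inside a pony db_session, as in the module above.
    with db_session:
        for address, nickname in MY_LABELS.items():
            set_nickname(address, nickname)
```
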
dao_treasury/_wallet.py
CHANGED
@@ -1,14 +1,20 @@
 from dataclasses import dataclass
-from
+from pathlib import Path
+from typing import Dict, Final, List, Optional, final

+import yaml
 from brownie.convert.datatypes import EthAddress
 from eth_typing import BlockNumber, ChecksumAddress, HexAddress
 from y import convert
 from y.time import closest_block_after_timestamp

+from dao_treasury.constants import CHAINID
+

 WALLETS: Final[Dict[ChecksumAddress, "TreasuryWallet"]] = {}

+to_address: Final = convert.to_address
+

 @final
 @dataclass
@@ -30,8 +36,13 @@ class TreasuryWallet:
     end_timestamp: Optional[int] = None
     """The last timestamp at which this wallet was considered owned by the DAO, if it wasn't always included in the treasury. If `end_timestamp` is provided, you cannot provide an `end_block`."""

+    networks: Optional[List[int]] = None
+    """The networks where the DAO owns this wallet. If not provided, the wallet will be active on all networks."""
+
     def __post_init__(self) -> None:
-
+        # If a user provides a wallets yaml file but forgets to wrap the address
+        # keys with quotes, it will be an integer we must convert to an address.
+        self.address = EthAddress(to_address(self.address))

         start_block = self.start_block
         start_timestamp = self.start_timestamp
@@ -66,12 +77,18 @@ class TreasuryWallet:
     def check_membership(
         address: Optional[HexAddress], block: Optional[BlockNumber] = None
     ) -> bool:
-        if address is
-            return
-
-
-
-
+        if address is None:
+            return False
+        wallet = TreasuryWallet._get_instance(address)
+        if wallet is None:
+            return False
+        # If networks filter is set, only include if current chain is listed
+        if wallet.networks and CHAINID not in wallet.networks:
+            return False
+        return block is None or (
+            wallet._start_block <= block
+            and (wallet._end_block is None or wallet._end_block >= block)
+        )

     @property
     def _start_block(self) -> BlockNumber:
@@ -97,13 +114,137 @@ class TreasuryWallet:
     def _get_instance(address: HexAddress) -> Optional["TreasuryWallet"]:
         # sourcery skip: use-contextlib-suppress
         try:
-
-        except KeyError:
-            pass
-        checksummed = convert.to_address(address)
-        try:
-            instance = WALLETS[address] = WALLETS[checksummed]
+            instance = WALLETS[address]
         except KeyError:
+            checksummed = to_address(address)
+            try:
+                instance = WALLETS[address] = WALLETS[checksummed]
+            except KeyError:
+                return None
+        if instance.networks and CHAINID not in instance.networks:
             return None
-
-
+        return instance
+
+
+def load_wallets_from_yaml(path: Path) -> List[TreasuryWallet]:
+    """
+    Load a YAML mapping of wallet addresses to configuration and return a list of TreasuryWallets.
+    'timestamp' in top-level start/end is universal.
+    'block' in top-level start/end must be provided under the chain ID key.
+    Optional 'networks' key lists chain IDs where this wallet is active.
+    """
+    try:
+        data = yaml.safe_load(path.read_bytes())
+    except Exception as e:
+        raise ValueError(f"Failed to parse wallets YAML: {e}")
+
+    if not isinstance(data, dict):
+        raise ValueError("Wallets YAML file must be a mapping of address to config")
+
+    wallets: List[TreasuryWallet] = []
+    for address, cfg in data.items():
+        # Allow bare keys
+        if cfg is None:
+            cfg = {}
+        elif not isinstance(cfg, dict):
+            raise ValueError(f"Invalid config for wallet {address}, expected mapping")
+
+        kwargs = {"address": address}
+
+        # Extract optional networks list
+        networks = cfg.get("networks")
+        if networks:
+            if not isinstance(networks, list) or not all(
+                isinstance(n, int) for n in networks
+            ):
+                raise ValueError(
+                    f"'networks' for wallet {address} must be a list of integers, got {networks}"
+                )
+            kwargs["networks"] = networks
+
+        # Parse start: timestamp universal, block under chain key
+        start_cfg = cfg.get("start", {})
+        if not isinstance(start_cfg, dict):
+            raise ValueError(
+                f"Invalid 'start' for wallet {address}. Expected mapping, got {start_cfg}."
+            )
+        for key, value in start_cfg.items():
+            if key == "timestamp":
+                if "start_block" in kwargs:
+                    raise ValueError(
+                        "You cannot provide both a start block and a start timestamp"
+                    )
+                kwargs["start_timestamp"] = value
+            elif key == "block":
+                if not isinstance(value, dict):
+                    raise ValueError(
+                        f"Invalid start block for wallet {address}. Expected mapping, got {value}."
+                    )
+                for chainid, start_block in value.items():
+                    if not isinstance(chainid, int):
+                        raise ValueError(
+                            f"Invalid chainid for wallet {address} start block. Expected integer, got {chainid}."
+                        )
+                    if not isinstance(start_block, int):
+                        raise ValueError(
+                            f"Invalid start block for wallet {address}. Expected integer, got {start_block}."
+                        )
+                    if chainid == CHAINID:
+                        if "start_timestamp" in kwargs:
+                            raise ValueError(
+                                "You cannot provide both a start block and a start timestamp"
+                            )
+                        kwargs["start_block"] = start_block
+            else:
+                raise ValueError(
+                    f"Invalid key: {key}. Valid options are 'block' or 'timestamp'."
+                )
+
+        chain_block = start_cfg.get(str(CHAINID)) or start_cfg.get(CHAINID)
+        if chain_block is not None:
+            if not isinstance(chain_block, int):
+                raise ValueError(
+                    f"Invalid start.block for chain {CHAINID} on {address}"
+                )
+            kwargs["start_block"] = chain_block

+        # Parse end: timestamp universal, block under chain key
+        end_cfg = cfg.get("end", {})
+        if not isinstance(end_cfg, dict):
+            raise ValueError(
+                f"Invalid 'end' for wallet {address}. Expected mapping, got {end_cfg}."
+            )
+
+        for key, value in end_cfg.items():
+            if key == "timestamp":
+                if "end_block" in kwargs:
+                    raise ValueError(
+                        "You cannot provide both an end block and an end timestamp"
+                    )
+                kwargs["end_timestamp"] = value
+            elif key == "block":
+                if not isinstance(value, dict):
+                    raise ValueError(
+                        f"Invalid end block for wallet {address}. Expected mapping, got {value}."
+                    )
+                for chainid, end_block in value.items():
+                    if not isinstance(chainid, int):
+                        raise ValueError(
+                            f"Invalid chainid for wallet {address} end block. Expected integer, got {chainid}."
+                        )
+                    if not isinstance(end_block, int):
+                        raise ValueError(
+                            f"Invalid end block for wallet {address}. Expected integer, got {end_block}."
+                        )
+                    if chainid == CHAINID:
+                        kwargs["end_block"] = end_block
+            else:
+                raise ValueError(
+                    f"Invalid key: {key}. Valid options are 'block' or 'timestamp'."
+                )
+
+        wallet = TreasuryWallet(**kwargs)
+        print(f"initialized {wallet}")
+        wallets.append(wallet)
+
+    return wallets
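
Based on the parsing rules in `load_wallets_from_yaml` above, a wallets file is a mapping of wallet address to config: a `timestamp` under `start`/`end` applies on every chain, a `block` must be nested under a chain ID, and an optional `networks` list restricts the wallet to specific chains. A sketch of such a file, with made-up addresses, blocks, and timestamps (quote the address keys so YAML does not parse them as integers):

```python
from pathlib import Path

from dao_treasury._wallet import load_wallets_from_yaml

# Illustrative config only; every address, block, and timestamp below is made up.
EXAMPLE_WALLETS_YAML = """\
"0x0000000000000000000000000000000000000001":
  networks: [1, 10]        # only treat this wallet as treasury on chains 1 and 10
  start:
    block:
      1: 15000000          # start block applies to chain ID 1 only
  end:
    timestamp: 1735689600  # end timestamp applies on every chain
"0x0000000000000000000000000000000000000002":  # bare key: always a treasury wallet
"""

path = Path("wallets.yaml")
path.write_text(EXAMPLE_WALLETS_YAML)
wallets = load_wallets_from_yaml(path)  # -> List[TreasuryWallet]
```
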

dao_treasury/constants.py
ADDED

@@ -0,0 +1,39 @@
+"""Core constants for DAO Treasury.
+
+All constants are marked with `Final`, ensuring immutability and allowing
+mypyc to compile them as extremely fast C-level constants for maximum
+performance. Defines chain IDs, zero address, and key contract addresses
+(e.g., Disperse.app) used throughout the system for transaction processing,
+nickname assignment, and analytics.
+
+Key Responsibilities:
+- Provide canonical addresses and chain IDs.
+- Support nickname setup and transaction categorization.
+- Guarantee fast, immutable constants at runtime.
+
+This is the single source of truth for system-wide constants.
+"""
+
+from typing import Final
+
+import eth_portfolio._utils
+import y.constants
+
+
+CHAINID: Final = y.constants.CHAINID
+# TODO: add docstring
+
+ZERO_ADDRESS: Final = "0x0000000000000000000000000000000000000000"
+# TODO: add docstring
+
+# TODO: move disperse.app stuff from yearn-treasury to dao-treasury and then write a docs file
+DISPERSE_APP: Final = (
+    "0xD152f549545093347A162Dce210e7293f1452150",
+    "0xd15fE25eD0Dba12fE05e7029C88b10C25e8880E3",
+)
+"""If your treasury sends funds to disperse.app, we create additional txs in the db so each individual send can be accounted for."""
+# TODO: all crosslink to disperse.py once ready
+
+
+SUPPRESS_ERROR_LOGS: Final = eth_portfolio._utils.SUPPRESS_ERROR_LOGS
+"""Append tokens here when you don't expect them to price successfully and do not want to see the associated error logs."""
|