dao-treasury 0.0.10__cp310-cp310-win32.whl → 0.0.70__cp310-cp310-win32.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. dao_treasury/.grafana/provisioning/dashboards/breakdowns/Expenses.json +551 -0
  2. dao_treasury/.grafana/provisioning/dashboards/breakdowns/Revenue.json +551 -0
  3. dao_treasury/.grafana/provisioning/dashboards/dashboards.yaml +7 -7
  4. dao_treasury/.grafana/provisioning/dashboards/streams/LlamaPay.json +220 -0
  5. dao_treasury/.grafana/provisioning/dashboards/summary/Monthly.json +153 -29
  6. dao_treasury/.grafana/provisioning/dashboards/transactions/Treasury Transactions.json +181 -29
  7. dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow (Including Unsorted).json +808 -0
  8. dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow.json +602 -0
  9. dao_treasury/.grafana/provisioning/dashboards/treasury/Current Treasury Assets.json +981 -0
  10. dao_treasury/.grafana/provisioning/dashboards/treasury/Historical Treasury Balances.json +2989 -0
  11. dao_treasury/.grafana/provisioning/dashboards/treasury/Operating Cashflow.json +478 -0
  12. dao_treasury/.grafana/provisioning/datasources/datasources.yaml +17 -0
  13. dao_treasury/ENVIRONMENT_VARIABLES.py +20 -0
  14. dao_treasury/__init__.py +36 -10
  15. dao_treasury/_docker.cp310-win32.pyd +0 -0
  16. dao_treasury/_docker.py +169 -37
  17. dao_treasury/_nicknames.cp310-win32.pyd +0 -0
  18. dao_treasury/_nicknames.py +32 -0
  19. dao_treasury/_wallet.cp310-win32.pyd +0 -0
  20. dao_treasury/_wallet.py +164 -12
  21. dao_treasury/constants.cp310-win32.pyd +0 -0
  22. dao_treasury/constants.py +39 -0
  23. dao_treasury/db.py +925 -150
  24. dao_treasury/docker-compose.yaml +6 -5
  25. dao_treasury/main.py +238 -28
  26. dao_treasury/sorting/__init__.cp310-win32.pyd +0 -0
  27. dao_treasury/sorting/__init__.py +219 -115
  28. dao_treasury/sorting/_matchers.cp310-win32.pyd +0 -0
  29. dao_treasury/sorting/_matchers.py +261 -17
  30. dao_treasury/sorting/_rules.cp310-win32.pyd +0 -0
  31. dao_treasury/sorting/_rules.py +166 -21
  32. dao_treasury/sorting/factory.cp310-win32.pyd +0 -0
  33. dao_treasury/sorting/factory.py +245 -37
  34. dao_treasury/sorting/rule.cp310-win32.pyd +0 -0
  35. dao_treasury/sorting/rule.py +228 -46
  36. dao_treasury/sorting/rules/__init__.cp310-win32.pyd +0 -0
  37. dao_treasury/sorting/rules/__init__.py +1 -0
  38. dao_treasury/sorting/rules/ignore/__init__.cp310-win32.pyd +0 -0
  39. dao_treasury/sorting/rules/ignore/__init__.py +1 -0
  40. dao_treasury/sorting/rules/ignore/llamapay.cp310-win32.pyd +0 -0
  41. dao_treasury/sorting/rules/ignore/llamapay.py +20 -0
  42. dao_treasury/streams/__init__.cp310-win32.pyd +0 -0
  43. dao_treasury/streams/__init__.py +0 -0
  44. dao_treasury/streams/llamapay.cp310-win32.pyd +0 -0
  45. dao_treasury/streams/llamapay.py +388 -0
  46. dao_treasury/treasury.py +118 -25
  47. dao_treasury/types.cp310-win32.pyd +0 -0
  48. dao_treasury/types.py +104 -7
  49. dao_treasury-0.0.70.dist-info/METADATA +134 -0
  50. dao_treasury-0.0.70.dist-info/RECORD +54 -0
  51. dao_treasury-0.0.70.dist-info/top_level.txt +2 -0
  52. dao_treasury__mypyc.cp310-win32.pyd +0 -0
  53. a743a720bbc4482d330e__mypyc.cp310-win32.pyd +0 -0
  54. dao_treasury/.grafana/provisioning/datasources/sqlite.yaml +0 -10
  55. dao_treasury-0.0.10.dist-info/METADATA +0 -36
  56. dao_treasury-0.0.10.dist-info/RECORD +0 -28
  57. dao_treasury-0.0.10.dist-info/top_level.txt +0 -2
  58. {dao_treasury-0.0.10.dist-info → dao_treasury-0.0.70.dist-info}/WHEEL +0 -0
dao_treasury/_docker.py CHANGED
@@ -1,62 +1,194 @@
+ """Docker orchestration utilities for DAO Treasury.
+
+ Provides functions to build, start, and stop Docker Compose services
+ required for analytics dashboards (Grafana, renderer). Integrates with
+ eth-portfolio's Docker setup and ensures all containers are managed
+ consistently for local analytics.
+
+ Key Responsibilities:
+ - Build and manage Grafana and renderer containers.
+ - Integrate with eth-portfolio Docker services.
+ - Provide decorators/utilities for container lifecycle management.
+
+ This is the main entry for all Docker-based orchestration.
+ """

  import logging
- import os
- import subprocess
  from functools import wraps
- from typing import Callable, Iterable, Tuple, TypeVar
+ from importlib import resources
+ from typing import Any, Callable, Coroutine, Final, Literal, Tuple, TypeVar, List

- from eth_portfolio_scripts.docker import check_system
+ import eth_portfolio_scripts.docker
+ from eth_portfolio_scripts.docker import docker_compose
  from typing_extensions import ParamSpec

+ logger: Final = logging.getLogger(__name__)

- logger = logging.getLogger(__name__)
-
- compose_file = os.path.join(
-     os.path.dirname(os.path.abspath(__file__)), 'docker-compose.yaml'
+ COMPOSE_FILE: Final = str(
+     resources.files("dao_treasury").joinpath("docker-compose.yaml")
  )
+ """The path of dao-treasury's docker-compose.yaml file on your machine"""
+
+
+ def up(*services: str) -> None:
+     """Build and start the specified containers defined in the compose file.
+
+     Args:
+         services: service names to bring up.
+
+     This function first builds the Docker services by invoking
+     :func:`build` and then starts the specified services in detached mode using
+     Docker Compose. If Docker Compose is not available, it falls back
+     to the legacy ``docker-compose`` command.
+
+     Examples:
+         >>> up('grafana')
+         starting the grafana container
+         >>> up()
+         starting all containers (grafana and renderer)
+
+     See Also:
+         :func:`build`
+         :func:`down`
+         :func:`_exec_command`
+     """
+     # eth-portfolio containers must be started first so dao-treasury can attach to the eth-portfolio docker network
+     eth_portfolio_scripts.docker.up("victoria-metrics")
+     build(*services)
+     _print_notice("starting", services)
+     _exec_command(["up", "-d", *services])
+
+
+ def down() -> None:
+     """Stop and remove Grafana containers.
+
+     This function brings down the Docker Compose services defined
+     in the compose file. Any positional arguments passed are ignored.
+
+     Examples:
+         >>> down()
+         # Stops containers
+
+     See Also:
+         :func:`up`
+     """
+     print("stopping all dao-treasury containers")
+     _exec_command(["down"])
+
+
+ def build(*services: str) -> None:
+     """Build Docker images for Grafana containers.
+
+     This function builds all services defined in the Docker Compose
+     configuration file. It is a prerequisite step before starting
+     containers with :func:`up`.

+     Examples:
+         >>> build()
+         building the grafana containers

- def up() -> None:
-     build()
-     print('starting the grafana containers')
-     _exec_command(['up', '-d'])
+     See Also:
+         :func:`up`
+         :func:`_exec_command`
+     """
+     _print_notice("building", services)
+     _exec_command(["build", *services])

- def down(*_) -> None:
-     _exec_command(['down'])

- def build() -> None:
-     print("building the grafana containers")
-     _exec_command(['build'])
+ def _print_notice(
+     doing: Literal["building", "starting"], services: Tuple[str, ...]
+ ) -> None:
+     if len(services) == 1:
+         container = services[0]
+         print(f"{doing} the {container} container")
+     elif len(services) == 2:
+         first, second = services
+         print(f"{doing} the {first} and {second} containers")
+     else:
+         *all_but_last, last = services
+         print(f"{doing} the {', '.join(all_but_last)}, and {last} containers")

- _P = ParamSpec('_P')
- _T = TypeVar('_T')

- def ensure_containers(fn: Callable[_P, _T]) -> Callable[_P, _T]:
+ _P = ParamSpec("_P")
+ _T = TypeVar("_T")
+
+
+ def ensure_containers(
+     fn: Callable[_P, Coroutine[Any, Any, _T]],
+ ) -> Callable[_P, Coroutine[Any, Any, _T]]:
+     """Decorator to ensure Grafana containers are running before execution.
+
+     This async decorator starts the Docker Compose services via
+     :func:`up` before invoking the wrapped coroutine function. Once
+     the wrapped function completes or raises an exception, the containers
+     can be torn down by calling :func:`down`, although teardown is
+     currently commented out.
+
+     Args:
+         fn: The asynchronous function to wrap.
+
+     Returns:
+         A new coroutine function that wraps the original.
+
+     Examples:
+         >>> @ensure_containers
+         ... async def main_task():
+         ...     # Container-dependent logic here
+         ...     pass
+         >>> import asyncio
+         >>> asyncio.run(main_task())
+
+     See Also:
+         :func:`up`
+         :func:`down`
+     """
+
      @wraps(fn)
-     async def compose_wrap(*args: _P.args, **kwargs: _P.kwargs):
+     async def compose_wrap(*args: _P.args, **kwargs: _P.kwargs) -> _T:
          # register shutdown sequence
          # TODO: argument to leave them up
-         # NOTE: do we need both this and the finally?
-         #signal.signal(signal.SIGINT, down)
-
+         # NOTE: do we need both this and the finally?
+         # signal.signal(signal.SIGINT, down)
+
          # start Grafana containers
-         up()
+         up("grafana")

          try:
              # attempt to run `fn`
-             await fn(*args, **kwargs)
+             return await fn(*args, **kwargs)
          finally:
              # stop and remove containers
-             #down()
-             ...
+             # down()
+             pass
+
      return compose_wrap

- def _exec_command(command: Iterable[str], *, compose_options: Tuple[str]=()) -> None:
-     check_system()
-     try:
-         subprocess.check_output(['docker', 'compose', *compose_options, '-f', compose_file, *command])
-     except (subprocess.CalledProcessError, FileNotFoundError) as e:
-         try:
-             subprocess.check_output(['docker-compose', *compose_options, '-f', compose_file, *command])
-         except (subprocess.CalledProcessError, FileNotFoundError) as _e:
-             raise RuntimeError(f"Error occurred while running {' '.join(command)}: {_e}") from _e
+
+ def _exec_command(command: List[str], *, compose_options: Tuple[str, ...] = ()) -> None:
+     """Execute a Docker Compose command with system checks and fallback.
+
+     This internal function ensures that Docker and Docker Compose
+     are installed by calling :func:`check_system`. It then executes the
+     specified command using the ``docker compose`` CLI. If that fails,
+     it falls back to the legacy ``docker-compose`` command.
+
+     Args:
+         command: The sequence of command arguments for Docker Compose
+             (e.g., ``['up', '-d']`` or ``['down']``).
+         compose_options: Additional options to pass before specifying
+             the compose file (not commonly used).
+
+     Raises:
+         RuntimeError: If both ``docker compose`` and ``docker-compose``
+             invocations fail.
+
+     Examples:
+         >>> _exec_command(['up', '-d'])
+         # Executes `docker compose -f docker-compose.yaml up -d`
+
+     See Also:
+         :func:`check_system`
+     """
+     docker_compose._exec_command(
+         command, compose_file=COMPOSE_FILE, compose_options=compose_options
+     )
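For orientation, a minimal sketch of how the new per-service helpers above might be driven from a script. `up`, `down`, `ensure_containers`, and the "grafana"/"renderer" service names come from the diff; the wrapped coroutine and the surrounding flow are illustrative only.

```python
# Illustrative sketch only, based on the dao_treasury._docker API shown above.
import asyncio

from dao_treasury._docker import down, ensure_containers, up


@ensure_containers  # starts the "grafana" service (after eth-portfolio's victoria-metrics) before running
async def export_dashboards() -> None:
    # hypothetical container-dependent work
    ...


if __name__ == "__main__":
    up("grafana", "renderer")  # or bring up specific services explicitly
    try:
        asyncio.run(export_dashboards())
    finally:
        down()  # ensure_containers leaves containers running, so teardown is manual
```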
dao_treasury/_nicknames.cp310-win32.pyd ADDED
Binary file
dao_treasury/_nicknames.py ADDED
@@ -0,0 +1,32 @@
+ """Address nickname setup utilities.
+
+ This module provides functions to assign human-readable nicknames to
+ important on-chain addresses (e.g., Zero Address, Disperse.app, tokens).
+ It is used at package initialization to ensure all analytics and dashboards
+ display professional, consistent labels.
+
+ Key Responsibilities:
+ - Set nicknames for core addresses in the database.
+ - Integrate with constants and token metadata.
+ - Support professional, readable analytics outputs.
+
+ This is called automatically on package import.
+ """
+
+ from typing import Final
+
+ from pony.orm import db_session
+
+ from dao_treasury import constants
+ from dao_treasury.db import Address, _set_address_nicknames_for_tokens
+
+
+ set_nickname: Final = Address.set_nickname
+
+
+ def setup_address_nicknames_in_db() -> None:
+     with db_session:
+         set_nickname(constants.ZERO_ADDRESS, "Zero Address")
+         for address in constants.DISPERSE_APP:
+             set_nickname(address, "Disperse.app")
+         _set_address_nicknames_for_tokens()
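As a hypothetical illustration of the same pattern, a downstream project could reuse `Address.set_nickname` for its own labels; the address and label below are invented.

```python
from pony.orm import db_session

from dao_treasury.db import Address

# Hypothetical: give one more contract a readable label for the dashboards.
with db_session:
    Address.set_nickname("0x0000000000000000000000000000000000000001", "Example Multisig")
```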
dao_treasury/_wallet.cp310-win32.pyd ADDED
Binary file
dao_treasury/_wallet.py CHANGED
@@ -1,14 +1,20 @@
  from dataclasses import dataclass
- from typing import Dict, Final, Optional, final
+ from pathlib import Path
+ from typing import Dict, Final, List, Optional, final

+ import yaml
  from brownie.convert.datatypes import EthAddress
  from eth_typing import BlockNumber, ChecksumAddress, HexAddress
  from y import convert
  from y.time import closest_block_after_timestamp

+ from dao_treasury.constants import CHAINID
+

  WALLETS: Final[Dict[ChecksumAddress, "TreasuryWallet"]] = {}

+ to_address: Final = convert.to_address
+

  @final
  @dataclass
@@ -30,8 +36,13 @@ class TreasuryWallet:
      end_timestamp: Optional[int] = None
      """The last timestamp at which this wallet was considered owned by the DAO, if it wasn't always included in the treasury. If `end_timestamp` is provided, you cannot provide an `end_block`."""

+     networks: Optional[List[int]] = None
+     """The networks where the DAO owns this wallet. If not provided, the wallet will be active on all networks."""
+
      def __post_init__(self) -> None:
-         self.address = EthAddress(self.address)
+         # If a user provides a wallets yaml file but forgets to wrap the address
+         # keys with quotes, it will be an integer we must convert to an address.
+         self.address = EthAddress(to_address(self.address))

          start_block = self.start_block
          start_timestamp = self.start_timestamp
@@ -44,7 +55,7 @@ class TreasuryWallet:
              raise ValueError("start_block can not be negative")
          if start_timestamp is not None and start_timestamp < 0:
              raise ValueError("start_timestamp can not be negative")
-
+
          end_block = self.end_block
          end_timestamp = self.end_timestamp
          if end_block is not None:
@@ -56,12 +67,29 @@ class TreasuryWallet:
              raise ValueError("end_block can not be negative")
          if end_timestamp is not None and end_timestamp < 0:
              raise ValueError("end_timestamp can not be negative")
-
+
          addr = ChecksumAddress(str(self.address))
          if addr in WALLETS:
              raise ValueError(f"TreasuryWallet {addr} already exists")
          WALLETS[addr] = self

+     @staticmethod
+     def check_membership(
+         address: Optional[HexAddress], block: Optional[BlockNumber] = None
+     ) -> bool:
+         if address is None:
+             return False
+         wallet = TreasuryWallet._get_instance(address)
+         if wallet is None:
+             return False
+         # If networks filter is set, only include if current chain is listed
+         if wallet.networks and CHAINID not in wallet.networks:
+             return False
+         return block is None or (
+             wallet._start_block <= block
+             and (wallet._end_block is None or wallet._end_block >= block)
+         )
+
      @property
      def _start_block(self) -> BlockNumber:
          start_block = self.start_block
@@ -86,13 +114,137 @@ class TreasuryWallet:
      def _get_instance(address: HexAddress) -> Optional["TreasuryWallet"]:
          # sourcery skip: use-contextlib-suppress
          try:
-             return WALLETS[address]
-         except KeyError:
-             pass
-         checksummed = convert.to_address(address)
-         try:
-             instance = WALLETS[address] = WALLETS[checksummed]
+             instance = WALLETS[address]
          except KeyError:
+             checksummed = to_address(address)
+             try:
+                 instance = WALLETS[address] = WALLETS[checksummed]
+             except KeyError:
+                 return None
+         if instance.networks and CHAINID not in instance.networks:
              return None
-         else:
-             return instance
+         return instance
+
+
+ def load_wallets_from_yaml(path: Path) -> List[TreasuryWallet]:
+     """
+     Load a YAML mapping of wallet addresses to configuration and return a list of TreasuryWallets.
+     'timestamp' in top-level start/end is universal.
+     'block' in top-level start/end must be provided under the chain ID key.
+     Optional 'networks' key lists chain IDs where this wallet is active.
+     """
+     try:
+         data = yaml.safe_load(path.read_bytes())
+     except Exception as e:
+         raise ValueError(f"Failed to parse wallets YAML: {e}")
+
+     if not isinstance(data, dict):
+         raise ValueError("Wallets YAML file must be a mapping of address to config")
+
+     wallets: List[TreasuryWallet] = []
+     for address, cfg in data.items():
+         # Allow bare keys
+         if cfg is None:
+             cfg = {}
+         elif not isinstance(cfg, dict):
+             raise ValueError(f"Invalid config for wallet {address}, expected mapping")
+
+         kwargs = {"address": address}
+
+         # Extract optional networks list
+         networks = cfg.get("networks")
+         if networks:
+             if not isinstance(networks, list) or not all(
+                 isinstance(n, int) for n in networks
+             ):
+                 raise ValueError(
+                     f"'networks' for wallet {address} must be a list of integers, got {networks}"
+                 )
+             kwargs["networks"] = networks
+
+         # Parse start: timestamp universal, block under chain key
+         start_cfg = cfg.get("start", {})
+         if not isinstance(start_cfg, dict):
+             raise ValueError(
+                 f"Invalid 'start' for wallet {address}. Expected mapping, got {start_cfg}."
+             )
+         for key, value in start_cfg.items():
+             if key == "timestamp":
+                 if "start_block" in kwargs:
+                     raise ValueError(
+                         "You cannot provide both a start block and a start timestamp"
+                     )
+                 kwargs["start_timestamp"] = value
+             elif key == "block":
+                 if not isinstance(value, dict):
+                     raise ValueError(
+                         f"Invalid start block for wallet {address}. Expected mapping, got {value}."
+                     )
+                 for chainid, start_block in value.items():
+                     if not isinstance(chainid, int):
+                         raise ValueError(
+                             f"Invalid chainid for wallet {address} start block. Expected integer, got {chainid}."
+                         )
+                     if not isinstance(start_block, int):
+                         raise ValueError(
+                             f"Invalid start block for wallet {address}. Expected integer, got {start_block}."
+                         )
+                     if chainid == CHAINID:
+                         if "start_timestamp" in kwargs:
+                             raise ValueError(
+                                 "You cannot provide both a start block and a start timestamp"
+                             )
+                         kwargs["start_block"] = start_block
+             else:
+                 raise ValueError(
+                     f"Invalid key: {key}. Valid options are 'block' or 'timestamp'."
+                 )
+
+         chain_block = start_cfg.get(str(CHAINID)) or start_cfg.get(CHAINID)
+         if chain_block is not None:
+             if not isinstance(chain_block, int):
+                 raise ValueError(
+                     f"Invalid start.block for chain {CHAINID} on {address}"
+                 )
+             kwargs["start_block"] = chain_block
+
+         # Parse end: timestamp universal, block under chain key
+         end_cfg = cfg.get("end", {})
+         if not isinstance(end_cfg, dict):
+             raise ValueError(
+                 f"Invalid 'end' for wallet {address}. Expected mapping, got {end_cfg}."
+             )
+
+         for key, value in end_cfg.items():
+             if key == "timestamp":
+                 if "end_block" in kwargs:
+                     raise ValueError(
+                         "You cannot provide both an end block and an end timestamp"
+                     )
+                 kwargs["end_timestamp"] = value
+             elif key == "block":
+                 if not isinstance(value, dict):
+                     raise ValueError(
+                         f"Invalid end block for wallet {address}. Expected mapping, got {value}."
+                     )
+                 for chainid, end_block in value.items():
+                     if not isinstance(chainid, int):
+                         raise ValueError(
+                             f"Invalid chainid for wallet {address} end block. Expected integer, got {chainid}."
+                         )
+                     if not isinstance(end_block, int):
+                         raise ValueError(
+                             f"Invalid end block for wallet {address}. Expected integer, got {end_block}."
+                         )
+                     if chainid == CHAINID:
+                         kwargs["end_block"] = end_block
+             else:
+                 raise ValueError(
+                     f"Invalid key: {key}. Valid options are 'block' or 'timestamp'."
+                 )
+
+         wallet = TreasuryWallet(**kwargs)
+         print(f"initialized {wallet}")
+         wallets.append(wallet)
+
+     return wallets
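To make the schema parsed by `load_wallets_from_yaml` concrete, a hypothetical wallets file is sketched below in comments and then loaded; the addresses, block numbers, timestamps, and file name are invented.

```python
from pathlib import Path

from dao_treasury._wallet import load_wallets_from_yaml

# Hypothetical wallets.yaml matching the parser above:
#
# "0x0000000000000000000000000000000000000001":  # quote keys so YAML reads them as strings
#   networks: [1, 250]          # only treat this wallet as treasury on chain IDs 1 and 250
#   start:
#     block:
#       1: 10500000             # start blocks are keyed by chain ID
#   end:
#     timestamp: 1735689600     # timestamps apply on every chain
# "0x0000000000000000000000000000000000000002":  # bare entry: active everywhere, no bounds
wallets = load_wallets_from_yaml(Path("wallets.yaml"))
for wallet in wallets:
    print(wallet.address, wallet.networks, wallet.start_block, wallet.end_timestamp)
```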
dao_treasury/constants.cp310-win32.pyd ADDED
Binary file
dao_treasury/constants.py ADDED
@@ -0,0 +1,39 @@
+ """Core constants for DAO Treasury.
+
+ All constants are marked with `Final`, ensuring immutability and allowing
+ mypyc to compile them as extremely fast C-level constants for maximum
+ performance. Defines chain IDs, zero address, and key contract addresses
+ (e.g., Disperse.app) used throughout the system for transaction processing,
+ nickname assignment, and analytics.
+
+ Key Responsibilities:
+ - Provide canonical addresses and chain IDs.
+ - Support nickname setup and transaction categorization.
+ - Guarantee fast, immutable constants at runtime.
+
+ This is the single source of truth for system-wide constants.
+ """
+
+ from typing import Final
+
+ import eth_portfolio._utils
+ import y.constants
+
+
+ CHAINID: Final = y.constants.CHAINID
+ # TODO: add docstring
+
+ ZERO_ADDRESS: Final = "0x0000000000000000000000000000000000000000"
+ # TODO: add docstring
+
+ # TODO: move disperse.app stuff from yearn-treasury to dao-treasury and then write a docs file
+ DISPERSE_APP: Final = (
+     "0xD152f549545093347A162Dce210e7293f1452150",
+     "0xd15fE25eD0Dba12fE05e7029C88b10C25e8880E3",
+ )
+ """If your treasury sends funds to disperse.app, we create additional txs in the db so each individual send can be accounted for."""
+ # TODO: all crosslink to disperse.py once ready
+
+
+ SUPPRESS_ERROR_LOGS: Final = eth_portfolio._utils.SUPPRESS_ERROR_LOGS
+ """Append tokens here when you don't expect them to price successfully and do not want to see the associated error logs."""