dao-treasury: dao_treasury-0.0.35-cp311-cp311-win_amd64.whl → dao_treasury-0.1.1-cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dao-treasury might be problematic. See the advisory details below for more information.

Files changed (49)
  1. dao_treasury/.grafana/provisioning/dashboards/breakdowns/Expenses.json +567 -0
  2. dao_treasury/.grafana/provisioning/dashboards/breakdowns/Revenue.json +569 -0
  3. dao_treasury/.grafana/provisioning/dashboards/dashboards.yaml +7 -57
  4. dao_treasury/.grafana/provisioning/dashboards/streams/LlamaPay.json +15 -27
  5. dao_treasury/.grafana/provisioning/dashboards/summary/Monthly.json +286 -25
  6. dao_treasury/.grafana/provisioning/dashboards/transactions/Treasury Transactions.json +54 -71
  7. dao_treasury/.grafana/provisioning/dashboards/transactions/Unsorted Transactions.json +367 -0
  8. dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow (Including Unsorted).json +172 -209
  9. dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow.json +122 -142
  10. dao_treasury/.grafana/provisioning/dashboards/treasury/Current Treasury Assets.json +941 -0
  11. dao_treasury/.grafana/provisioning/dashboards/treasury/Historical Treasury Balances.json +3931 -0
  12. dao_treasury/.grafana/provisioning/dashboards/treasury/Operating Cashflow.json +89 -108
  13. dao_treasury/.grafana/provisioning/datasources/datasources.yaml +9 -4
  14. dao_treasury/ENVIRONMENT_VARIABLES.py +12 -0
  15. dao_treasury/__init__.py +14 -4
  16. dao_treasury/_docker.cp311-win_amd64.pyd +0 -0
  17. dao_treasury/_docker.py +44 -23
  18. dao_treasury/_nicknames.cp311-win_amd64.pyd +0 -0
  19. dao_treasury/_nicknames.py +18 -2
  20. dao_treasury/_wallet.cp311-win_amd64.pyd +0 -0
  21. dao_treasury/constants.cp311-win_amd64.pyd +0 -0
  22. dao_treasury/constants.py +24 -0
  23. dao_treasury/db.py +409 -119
  24. dao_treasury/docker-compose.yaml +19 -3
  25. dao_treasury/main.py +44 -3
  26. dao_treasury/sorting/__init__.cp311-win_amd64.pyd +0 -0
  27. dao_treasury/sorting/__init__.py +38 -21
  28. dao_treasury/sorting/_matchers.cp311-win_amd64.pyd +0 -0
  29. dao_treasury/sorting/_rules.cp311-win_amd64.pyd +0 -0
  30. dao_treasury/sorting/factory.cp311-win_amd64.pyd +0 -0
  31. dao_treasury/sorting/rule.cp311-win_amd64.pyd +0 -0
  32. dao_treasury/sorting/rule.py +8 -10
  33. dao_treasury/sorting/rules/__init__.cp311-win_amd64.pyd +0 -0
  34. dao_treasury/sorting/rules/ignore/__init__.cp311-win_amd64.pyd +0 -0
  35. dao_treasury/sorting/rules/ignore/llamapay.cp311-win_amd64.pyd +0 -0
  36. dao_treasury/streams/llamapay.py +14 -2
  37. dao_treasury/treasury.py +37 -16
  38. dao_treasury/types.cp311-win_amd64.pyd +0 -0
  39. {dao_treasury-0.0.35.dist-info → dao_treasury-0.1.1.dist-info}/METADATA +19 -3
  40. dao_treasury-0.1.1.dist-info/RECORD +53 -0
  41. dao_treasury-0.1.1.dist-info/top_level.txt +2 -0
  42. dao_treasury__mypyc.cp311-win_amd64.pyd +0 -0
  43. bf2b4fe1f86ad2ea158b__mypyc.cp311-win_amd64.pyd +0 -0
  44. dao_treasury/.grafana/provisioning/dashboards/treasury/Treasury.json +0 -2018
  45. dao_treasury/streams/__init__.cp311-win_amd64.pyd +0 -0
  46. dao_treasury/streams/llamapay.cp311-win_amd64.pyd +0 -0
  47. dao_treasury-0.0.35.dist-info/RECORD +0 -51
  48. dao_treasury-0.0.35.dist-info/top_level.txt +0 -2
  49. {dao_treasury-0.0.35.dist-info → dao_treasury-0.1.1.dist-info}/WHEEL +0 -0
dao_treasury/db.py CHANGED
@@ -3,6 +3,7 @@
3
3
  Database models and utilities for DAO treasury reporting.
4
4
 
5
5
  This module defines Pony ORM entities for:
6
+
6
7
  - Blockchain networks (:class:`Chain`)
7
8
  - On-chain addresses (:class:`Address`)
8
9
  - ERC-20 tokens and native coin placeholder (:class:`Token`)
@@ -15,29 +16,44 @@ resolving integrity conflicts, caching transaction receipts,
15
16
  and creating SQL views for reporting.
16
17
  """
17
18
 
19
+ import os
18
20
  import typing
19
- from asyncio import Semaphore
21
+ from asyncio import Lock, Semaphore
22
+ from collections import OrderedDict
23
+ from datetime import date, datetime, time, timezone
20
24
  from decimal import Decimal, InvalidOperation
21
25
  from functools import lru_cache
22
26
  from logging import getLogger
23
- from os import path
24
- from pathlib import Path
25
- from typing import TYPE_CHECKING, Dict, Final, Tuple, Union, final
26
- from datetime import date, datetime, time, timezone
27
+ from typing import (
28
+ TYPE_CHECKING,
29
+ Any,
30
+ Callable,
31
+ Coroutine,
32
+ Dict,
33
+ Final,
34
+ Literal,
35
+ Tuple,
36
+ TypeVar,
37
+ Union,
38
+ final,
39
+ overload,
40
+ )
27
41
 
42
+ import eth_portfolio
28
43
  from a_sync import AsyncThreadPoolExecutor
29
44
  from brownie import chain
30
45
  from brownie.convert.datatypes import HexString
31
46
  from brownie.exceptions import EventLookupError
32
47
  from brownie.network.event import EventDict, _EventItem
33
48
  from brownie.network.transaction import TransactionReceipt
34
- from eth_typing import ChecksumAddress, HexAddress, HexStr
35
49
  from eth_portfolio.structs import (
36
50
  InternalTransfer,
37
51
  LedgerEntry,
38
52
  TokenTransfer,
39
53
  Transaction,
40
54
  )
55
+ from eth_retry import auto_retry
56
+ from eth_typing import ChecksumAddress, HexAddress, HexStr
41
57
  from pony.orm import (
42
58
  Database,
43
59
  InterfaceError,
@@ -50,9 +66,12 @@ from pony.orm import (
50
66
  composite_key,
51
67
  composite_index,
52
68
  db_session,
69
+ rollback,
53
70
  select,
54
71
  )
72
+ from typing_extensions import ParamSpec
55
73
  from y import EEE_ADDRESS, Contract, Network, convert, get_block_timestamp_async
74
+ from y._db.decorators import retry_locked
56
75
  from y.contracts import _get_code
57
76
  from y.exceptions import ContractNotVerified
58
77
 
@@ -60,20 +79,34 @@ from dao_treasury.constants import CHAINID
60
79
  from dao_treasury.types import TxGroupDbid, TxGroupName
61
80
 
62
81
 
63
- SQLITE_DIR = Path(path.expanduser("~")) / ".dao-treasury"
64
- """Path to the directory in the user's home where the DAO treasury SQLite database is stored."""
82
+ _T = TypeVar("_T")
83
+ _P = ParamSpec("_P")
84
+
85
+ EventItem = _EventItem[_EventItem[OrderedDict[str, Any]]]
65
86
 
66
- SQLITE_DIR.mkdir(parents=True, exist_ok=True)
67
87
 
88
+ # Postgres connection parameters from environment variables (with docker-compose defaults)
89
+ POSTGRES_USER = os.getenv("DAO_TREASURY_DB_USER", "dao_treasury")
90
+ POSTGRES_PASSWORD = os.getenv("DAO_TREASURY_DB_PASSWORD", "dao_treasury")
91
+ POSTGRES_DB = os.getenv("DAO_TREASURY_DB_NAME", "dao_treasury")
92
+ POSTGRES_HOST = os.getenv("DAO_TREASURY_DB_HOST", "127.0.0.1")
93
+ POSTGRES_PORT = int(os.getenv("DAO_TREASURY_DB_PORT", "8675"))
68
94
 
69
95
  _INSERT_THREAD = AsyncThreadPoolExecutor(1)
70
96
  _SORT_THREAD = AsyncThreadPoolExecutor(1)
97
+ _EVENTS_THREADS = AsyncThreadPoolExecutor(16)
71
98
  _SORT_SEMAPHORE = Semaphore(50)
72
99
 
73
100
  _UTC = timezone.utc
74
101
 
75
102
  db = Database()
76
103
 
104
+ db_ready: bool = False
105
+ startup_lock: Final = Lock()
106
+
107
+ must_sort_inbound_txgroup_dbid: TxGroupDbid = None
108
+ must_sort_outbound_txgroup_dbid: TxGroupDbid = None
109
+
77
110
  logger = getLogger("dao_treasury.db")
78
111
 
79
112
 
@@ -190,7 +223,7 @@ class Address(DbEntity):
190
223
  address = Required(str, index=True)
191
224
  """Checksum string of the on-chain address."""
192
225
 
193
- nickname = Optional(str)
226
+ nickname = Optional(str, index=True)
194
227
  """Optional human-readable label (e.g., contract name or token name)."""
195
228
 
196
229
  is_contract = Required(bool, index=True, lazy=True)
@@ -234,6 +267,10 @@ class Address(DbEntity):
234
267
  def contract(self) -> Contract:
235
268
  return Contract(self.address)
236
269
 
270
+ @property
271
+ def contract_coro(self) -> Coroutine[Any, Any, Contract]:
272
+ return Contract.coroutine(self.address)
273
+
237
274
  @staticmethod
238
275
  @lru_cache(maxsize=None)
239
276
  def get_dbid(address: HexAddress) -> int:
@@ -294,7 +331,6 @@ class Address(DbEntity):
294
331
  )
295
332
 
296
333
  commit()
297
-
298
334
  return entity # type: ignore [no-any-return]
299
335
 
300
336
  @staticmethod
@@ -373,7 +409,7 @@ class Token(DbEntity):
373
409
  symbol = Required(str, index=True, lazy=True)
374
410
  """Short ticker symbol for the token."""
375
411
 
376
- name = Required(str, lazy=True)
412
+ name = Required(str, lazy=True, index=True)
377
413
  """Full human-readable name of the token."""
378
414
 
379
415
  decimals = Required(int, lazy=True)
@@ -386,12 +422,15 @@ class Token(DbEntity):
386
422
  """Inverse relation for treasury transactions involving this token."""
387
423
  # partner_harvest_event = Set('PartnerHarvestEvent', reverse="vault", lazy=True)
388
424
 
389
- address = Required(Address, column="address_id")
425
+ address = Required(Address, column="address_id", unique=True)
390
426
  """Foreign key to the address record for this token contract."""
391
427
 
392
428
  streams = Set("Stream", reverse="token", lazy=True)
393
429
  # vesting_escrows = Set("VestingEscrow", reverse="token", lazy=True)
394
430
 
431
+ composite_index(chain, name)
432
+ composite_index(chain, symbol)
433
+
395
434
  def __eq__(self, other: Union["Token", Address, ChecksumAddress]) -> bool: # type: ignore [override]
396
435
  if isinstance(other, str):
397
436
  return self.address == other
@@ -405,6 +444,10 @@ class Token(DbEntity):
405
444
  def contract(self) -> Contract:
406
445
  return Contract(self.address.address)
407
446
 
447
+ @property
448
+ def contract_coro(self) -> Coroutine[Any, Any, Contract]:
449
+ return Contract.coroutine(self.address.address)
450
+
408
451
  @property
409
452
  def scale(self) -> int:
410
453
  """Base for division according to `decimals`, e.g., `10**decimals`.
@@ -530,13 +573,13 @@ class TxGroup(DbEntity):
530
573
  txgroup_id = PrimaryKey(int, auto=True)
531
574
  """Auto-incremented primary key for transaction groups."""
532
575
 
533
- name = Required(str)
576
+ name = Required(str, index=True)
534
577
  """Name of the grouping category, e.g., 'Revenue', 'Expenses'."""
535
578
 
536
579
  treasury_tx = Set("TreasuryTx", reverse="txgroup", lazy=True)
537
580
  """Inverse relation for treasury transactions assigned to this group."""
538
581
 
539
- parent_txgroup = Optional("TxGroup", reverse="child_txgroups")
582
+ parent_txgroup = Optional("TxGroup", reverse="child_txgroups", index=True)
540
583
  """Optional reference to a parent group for nesting."""
541
584
 
542
585
  composite_key(name, parent_txgroup)
@@ -620,6 +663,8 @@ class TxGroup(DbEntity):
620
663
  if txgroup := TxGroup.get(name=name, parent_txgroup=parent):
621
664
  return txgroup # type: ignore [no-any-return]
622
665
  raise Exception(e, name, parent) from e
666
+ else:
667
+ db.execute("REFRESH MATERIALIZED VIEW txgroup_hierarchy;")
623
668
  return txgroup # type: ignore [no-any-return]
624
669
 
625
670
 
@@ -706,6 +751,25 @@ class TreasuryTx(DbEntity):
706
751
  """Foreign key to the categorization group."""
707
752
 
708
753
  composite_index(chain, txgroup)
754
+ composite_index(chain, token)
755
+ composite_index(chain, from_address)
756
+ composite_index(chain, to_address)
757
+ composite_index(chain, from_address, to_address)
758
+ composite_index(timestamp, txgroup)
759
+ composite_index(timestamp, token)
760
+ composite_index(timestamp, from_address)
761
+ composite_index(timestamp, to_address)
762
+ composite_index(timestamp, from_address, to_address)
763
+ composite_index(timestamp, chain, txgroup)
764
+ composite_index(timestamp, chain, token)
765
+ composite_index(timestamp, chain, from_address)
766
+ composite_index(timestamp, chain, to_address)
767
+ composite_index(timestamp, chain, from_address, to_address)
768
+ composite_index(chain, timestamp, txgroup)
769
+ composite_index(chain, timestamp, token)
770
+ composite_index(chain, timestamp, from_address)
771
+ composite_index(chain, timestamp, to_address)
772
+ composite_index(chain, timestamp, from_address, to_address)
709
773
 
710
774
  @property
711
775
  def to_nickname(self) -> typing.Optional[str]:
@@ -719,6 +783,10 @@ class TreasuryTx(DbEntity):
719
783
  """Human-readable label for the sender address."""
720
784
  return self.from_address.nickname or self.from_address.address # type: ignore [union-attr]
721
785
 
786
+ @property
787
+ def token_address(self) -> ChecksumAddress:
788
+ return self.token.address.address
789
+
722
790
  @property
723
791
  def symbol(self) -> str:
724
792
  """Ticker symbol for the transferred token."""
@@ -729,7 +797,23 @@ class TreasuryTx(DbEntity):
729
797
  """Decoded event logs for this transaction."""
730
798
  return self._transaction.events
731
799
 
732
- def get_events(self, event_name: str) -> _EventItem:
800
+ async def events_async(self) -> EventDict:
801
+ """Asynchronously fetch decoded event logs for this transaction."""
802
+ tx = self._transaction
803
+ events = tx._events
804
+ if events is None:
805
+ events = await _EVENTS_THREADS.run(getattr, tx, "events")
806
+ return events
807
+
808
+ @overload
809
+ def get_events(
810
+ self, event_name: str, sync: Literal[False]
811
+ ) -> Coroutine[Any, Any, EventItem]: ...
812
+ @overload
813
+ def get_events(self, event_name: str, sync: bool = True) -> EventItem: ...
814
+ def get_events(self, event_name: str, sync: bool = True) -> EventItem:
815
+ if not sync:
816
+ return _EVENTS_THREADS.run(self.get_events, event_name)
733
817
  try:
734
818
  return self.events[event_name]
735
819
  except EventLookupError:
@@ -746,6 +830,7 @@ class TreasuryTx(DbEntity):
746
830
  return get_transaction(self.hash)
747
831
 
748
832
  @staticmethod
833
+ @auto_retry
749
834
  async def insert(entry: LedgerEntry) -> None:
750
835
  """Asynchronously insert and sort a ledger entry.
751
836
 
@@ -916,6 +1001,10 @@ class TreasuryTx(DbEntity):
916
1001
  must_sort_inbound_txgroup_dbid,
917
1002
  must_sort_outbound_txgroup_dbid,
918
1003
  ):
1004
+ with db_session:
1005
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum;")
1006
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_revenue;")
1007
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_expenses;")
919
1008
  logger.info(
920
1009
  "Sorted %s to %s", entry, TxGroup.get_fullname(txgroup_dbid)
921
1010
  )
@@ -923,29 +1012,47 @@ class TreasuryTx(DbEntity):
923
1012
  return dbid # type: ignore [no-any-return]
924
1013
 
925
1014
  @staticmethod
1015
+ @retry_locked
926
1016
  def __set_txgroup(treasury_tx_dbid: int, txgroup_dbid: TxGroupDbid) -> None:
927
1017
  with db_session:
928
1018
  TreasuryTx[treasury_tx_dbid].txgroup = txgroup_dbid
929
1019
  commit()
1020
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum;")
1021
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_revenue;")
1022
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_expenses;")
930
1023
 
931
1024
 
932
1025
  _stream_metadata_cache: Final[Dict[HexStr, Tuple[ChecksumAddress, date]]] = {}
933
1026
 
934
1027
 
1028
+ def refresh_matview(name: str) -> Callable[[Callable[_P, _T]], Callable[_P, _T]]:
1029
+ def matview_deco(fn: Callable[_P, _T]) -> Callable[_P, _T]:
1030
+ def matview_refresh_wrap(*args: _P.args, **kwargs: _P.kwargs) -> _T:
1031
+ retval = fn(*args, **kwargs)
1032
+ commit()
1033
+ db.execute(f"REFRESH MATERIALIZED VIEW {name};")
1034
+ commit()
1035
+ return retval
1036
+
1037
+ return matview_refresh_wrap
1038
+
1039
+ return matview_deco
1040
+
1041
+
935
1042
  class Stream(DbEntity):
936
1043
  _table_ = "streams"
937
1044
  stream_id = PrimaryKey(str)
938
1045
 
939
- contract = Required("Address", reverse="streams")
940
- start_block = Required(int)
941
- end_block = Optional(int)
1046
+ contract = Required("Address", reverse="streams", index=True)
1047
+ start_block = Required(int, index=True)
1048
+ end_block = Optional(int, index=True)
942
1049
  token = Required("Token", reverse="streams", index=True)
943
- from_address = Required("Address", reverse="streams_from")
944
- to_address = Required("Address", reverse="streams_to")
945
- reason = Optional(str)
1050
+ from_address = Required("Address", reverse="streams_from", index=True)
1051
+ to_address = Required("Address", reverse="streams_to", index=True)
1052
+ reason = Optional(str, index=True)
946
1053
  amount_per_second = Required(Decimal, 38, 1)
947
- status = Required(str, default="Active")
948
- txgroup = Optional("TxGroup", reverse="streams")
1054
+ status = Required(str, default="Active", index=True)
1055
+ txgroup = Optional("TxGroup", reverse="streams", index=True)
949
1056
 
950
1057
  streamed_funds = Set("StreamedFunds", lazy=True)
951
1058
 
@@ -987,10 +1094,12 @@ class Stream(DbEntity):
987
1094
  end = datetime.fromtimestamp(chain[stream.end_block].timestamp, tz=_UTC)
988
1095
  return start, end
989
1096
 
1097
+ @refresh_matview("stream_ledger")
990
1098
  def stop_stream(self, block: int) -> None:
991
1099
  self.end_block = block
992
1100
  self.status = "Stopped"
993
1101
 
1102
+ @refresh_matview("stream_ledger")
994
1103
  def pause(self) -> None:
995
1104
  self.status = "Paused"
996
1105
 
@@ -1050,6 +1159,7 @@ class StreamedFunds(DbEntity):
1050
1159
 
1051
1160
  @classmethod
1052
1161
  @db_session
1162
+ @refresh_matview("stream_ledger")
1053
1163
  def create_entity(
1054
1164
  cls,
1055
1165
  stream_id: str,
@@ -1074,23 +1184,54 @@ class StreamedFunds(DbEntity):
1074
1184
  return entity
1075
1185
 
1076
1186
 
1077
- db.bind(
1078
- provider="sqlite", # TODO: let user choose postgres with server connection params
1079
- filename=str(SQLITE_DIR / "dao-treasury.sqlite"),
1080
- create_db=True,
1081
- )
1187
+ def init_db() -> None:
1188
+ """Initialize the database if not yet initialized."""
1189
+ global db_ready
1190
+ if db_ready:
1191
+ return
1192
+
1193
+ db.bind(
1194
+ provider="postgres",
1195
+ user=POSTGRES_USER,
1196
+ password=POSTGRES_PASSWORD,
1197
+ host=POSTGRES_HOST,
1198
+ port=POSTGRES_PORT,
1199
+ database=POSTGRES_DB,
1200
+ )
1082
1201
 
1083
- db.generate_mapping(create_tables=True)
1202
+ db.generate_mapping(create_tables=True)
1084
1203
 
1204
+ with db_session:
1205
+ create_stream_ledger_matview()
1206
+ create_txgroup_hierarchy_matview()
1207
+ # create_vesting_ledger_view()
1208
+ create_general_ledger_view()
1209
+ create_unsorted_txs_view()
1210
+ create_usdval_presum_matview()
1211
+ create_usdval_presum_revenue_matview()
1212
+ create_usdval_presum_expenses_matview()
1213
+ # create_monthly_pnl_view()
1214
+
1215
+ global must_sort_inbound_txgroup_dbid
1216
+ must_sort_inbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Inbound)")
1085
1217
 
1086
- def _set_address_nicknames_for_tokens() -> None:
1218
+ global must_sort_outbound_txgroup_dbid
1219
+ must_sort_outbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Outbound)")
1220
+
1221
+ _drop_shitcoin_txs()
1222
+
1223
+ db_ready = True
1224
+
1225
+
1226
+ def set_address_nicknames_for_tokens() -> None:
1087
1227
  """Set address.nickname for addresses belonging to tokens."""
1228
+ init_db()
1088
1229
  for address in select(a for a in Address if a.token and not a.nickname):
1089
1230
  address.nickname = f"Token: {address.token.name}"
1090
1231
  db.commit()
1091
1232
 
1092
1233
 
1093
- def create_stream_ledger_view() -> None:
1234
+ def create_stream_ledger_matview() -> None:
1094
1235
  """Create or replace the SQL view `stream_ledger` for streamed funds reporting.
1095
1236
 
1096
1237
  This view joins streamed funds, streams, tokens, addresses, and txgroups
@@ -1099,60 +1240,90 @@ def create_stream_ledger_view() -> None:
1099
1240
  Examples:
1100
1241
  >>> create_stream_ledger_view()
1101
1242
  """
1102
- db.execute("""DROP VIEW IF EXISTS stream_ledger;""")
1103
- db.execute(
1104
- """
1105
- create view stream_ledger as
1106
- SELECT 'Mainnet' as chain_name,
1107
- cast(strftime('%s', date || ' 00:00:00') as INTEGER) as timestamp,
1108
- NULL as block,
1109
- NULL as hash,
1110
- NULL as log_index,
1111
- symbol as token,
1112
- d.address AS "from",
1113
- d.nickname as from_nickname,
1114
- e.address AS "to",
1115
- e.nickname as to_nickname,
1116
- amount,
1117
- price,
1118
- value_usd,
1119
- txgroup.name as txgroup,
1120
- parent.name as parent_txgroup,
1243
+ try:
1244
+ db.execute(
1245
+ """
1246
+ DROP MATERIALIZED VIEW IF EXISTS stream_ledger CASCADE;
1247
+ CREATE MATERIALIZED VIEW stream_ledger AS
1248
+ SELECT
1249
+ 'Mainnet' as chain_name,
1250
+ EXTRACT(EPOCH FROM (date::date))::integer as timestamp,
1251
+ CAST(NULL as integer) as block,
1252
+ NULL as hash,
1253
+ CAST(NULL as integer) as log_index,
1254
+ symbol as token,
1255
+ d.address AS "from",
1256
+ d.nickname as from_nickname,
1257
+ e.address AS "to",
1258
+ e.nickname as to_nickname,
1259
+ amount,
1260
+ price,
1261
+ value_usd,
1262
+ txgroup.name as txgroup,
1263
+ parent.name as parent_txgroup,
1121
1264
  txgroup.txgroup_id
1122
- FROM streamed_funds a
1123
- LEFT JOIN streams b ON a.stream = b.stream_id
1124
- LEFT JOIN tokens c ON b.token = c.token_id
1125
- LEFT JOIN addresses d ON b.from_address = d.address_id
1126
- LEFT JOIN addresses e ON b.to_address = e.address_id
1127
- LEFT JOIN txgroups txgroup ON b.txgroup = txgroup.txgroup_id
1128
- LEFT JOIN txgroups parent ON txgroup.parent_txgroup = parent.txgroup_id
1129
- """
1130
- )
1265
+ FROM streamed_funds a
1266
+ LEFT JOIN streams b ON a.stream = b.stream_id
1267
+ LEFT JOIN tokens c ON b.token = c.token_id
1268
+ LEFT JOIN addresses d ON b.from_address = d.address_id
1269
+ LEFT JOIN addresses e ON b.to_address = e.address_id
1270
+ LEFT JOIN txgroups txgroup ON b.txgroup = txgroup.txgroup_id
1271
+ LEFT JOIN txgroups parent ON txgroup.parent_txgroup = parent.txgroup_id;
1272
+
1273
+ """
1274
+ )
1275
+ except Exception as e:
1276
+ if '"stream_ledger" is not a materialized view' not in str(e):
1277
+ raise
1278
+ # we're running an old schema, lets migrate it
1279
+ rollback()
1280
+ db.execute("DROP VIEW IF EXISTS stream_ledger CASCADE;")
1281
+ commit()
1282
+ create_stream_ledger_matview()
1131
1283
 
1132
1284
 
1133
- def create_txgroup_hierarchy_view() -> None:
1285
+ def create_txgroup_hierarchy_matview() -> None:
1134
1286
  """Create or replace the SQL view `txgroup_hierarchy` for recursive txgroup hierarchy.
1135
1287
 
1136
1288
  This view exposes txgroup_id, top_category, and parent_txgroup for all txgroups,
1137
1289
  matching the recursive CTE logic used in dashboards.
1138
1290
  """
1139
- db.execute("DROP VIEW IF EXISTS txgroup_hierarchy;")
1140
- db.execute(
1141
- """
1142
- CREATE VIEW txgroup_hierarchy AS
1143
- WITH RECURSIVE group_hierarchy (txgroup_id, top_category, parent_txgroup) AS (
1144
- SELECT txgroup_id, name AS top_category, parent_txgroup
1145
- FROM txgroups
1146
- WHERE parent_txgroup IS NULL
1147
- UNION ALL
1148
- SELECT child.txgroup_id, parent.top_category, child.parent_txgroup
1149
- FROM txgroups AS child
1150
- JOIN group_hierarchy AS parent
1151
- ON child.parent_txgroup = parent.txgroup_id
1291
+ try:
1292
+ db.execute(
1293
+ """
1294
+ DROP MATERIALIZED VIEW IF EXISTS txgroup_hierarchy CASCADE;
1295
+ CREATE MATERIALIZED VIEW txgroup_hierarchy AS
1296
+ WITH RECURSIVE group_hierarchy (txgroup_id, top_category, parent_txgroup) AS (
1297
+ SELECT txgroup_id, name AS top_category, parent_txgroup
1298
+ FROM txgroups
1299
+ WHERE parent_txgroup IS NULL
1300
+ UNION ALL
1301
+ SELECT child.txgroup_id, parent.top_category, child.parent_txgroup
1302
+ FROM txgroups AS child
1303
+ JOIN group_hierarchy AS parent
1304
+ ON child.parent_txgroup = parent.txgroup_id
1305
+ )
1306
+ SELECT * FROM group_hierarchy;
1307
+
1308
+ -- Indexes
1309
+ CREATE UNIQUE INDEX idx_txgroup_hierarchy_txgroup_id
1310
+ ON txgroup_hierarchy (txgroup_id);
1311
+
1312
+ CREATE INDEX idx_txgroup_hierarchy_top_category
1313
+ ON txgroup_hierarchy (top_category);
1314
+
1315
+ CREATE INDEX idx_txgroup_hierarchy_parent
1316
+ ON txgroup_hierarchy (parent_txgroup);
1317
+ """
1152
1318
  )
1153
- SELECT * FROM group_hierarchy;
1154
- """
1155
- )
1319
+ except Exception as e:
1320
+ if '"txgroup_hierarchy" is not a materialized view' not in str(e):
1321
+ raise
1322
+ # we're running an old schema, lets migrate it
1323
+ rollback()
1324
+ db.execute("DROP VIEW IF EXISTS txgroup_hierarchy CASCADE;")
1325
+ commit()
1326
+ create_txgroup_hierarchy_matview()
1156
1327
 
1157
1328
 
1158
1329
  def create_vesting_ledger_view() -> None:
@@ -1168,11 +1339,12 @@ def create_vesting_ledger_view() -> None:
1168
1339
  """
1169
1340
  DROP VIEW IF EXISTS vesting_ledger;
1170
1341
  CREATE VIEW vesting_ledger AS
1171
- SELECT d.chain_name,
1172
- CAST(date AS timestamp) AS "timestamp",
1173
- cast(NULL as int) AS block,
1342
+ SELECT
1343
+ d.chain_name,
1344
+ date::timestamp AS "timestamp",
1345
+ CAST(NULL as integer) AS block,
1174
1346
  NULL AS "hash",
1175
- cast(NULL as int) AS "log_index",
1347
+ CAST(NULL as integer) AS "log_index",
1176
1348
  c.symbol AS "token",
1177
1349
  e.address AS "from",
1178
1350
  e.nickname as from_nickname,
@@ -1184,14 +1356,14 @@ def create_vesting_ledger_view() -> None:
1184
1356
  g.name as txgroup,
1185
1357
  h.name AS parent_txgroup,
1186
1358
  g.txgroup_id
1187
- FROM vested_funds a
1359
+ FROM vested_funds a
1188
1360
  LEFT JOIN vesting_escrows b ON a.escrow = b.escrow_id
1189
1361
  LEFT JOIN tokens c ON b.token = c.token_id
1190
1362
  LEFT JOIN chains d ON c.chain = d.chain_dbid
1191
1363
  LEFT JOIN addresses e ON b.address = e.address_id
1192
1364
  LEFT JOIN addresses f ON b.recipient = f.address_id
1193
1365
  LEFT JOIN txgroups g ON b.txgroup = g.txgroup_id
1194
- left JOIN txgroups h ON g.parent_txgroup = h.txgroup_id
1366
+ LEFT JOIN txgroups h ON g.parent_txgroup = h.txgroup_id;
1195
1367
  """
1196
1368
  )
1197
1369
 
@@ -1204,13 +1376,17 @@ def create_general_ledger_view() -> None:
1204
1376
  Examples:
1205
1377
  >>> create_general_ledger_view()
1206
1378
  """
1207
- db.execute("drop VIEW IF EXISTS general_ledger")
1208
1379
  db.execute(
1209
1380
  """
1210
- create VIEW general_ledger as
1211
- select *
1212
- from (
1213
- SELECT treasury_tx_id, b.chain_name, a.timestamp, a.block, a.hash, a.log_index, c.symbol AS token, d.address AS "from", d.nickname as from_nickname, e.address AS "to", e.nickname as to_nickname, a.amount, a.price, a.value_usd, f.name AS txgroup, g.name AS parent_txgroup, f.txgroup_id
1381
+ DROP VIEW IF EXISTS general_ledger;
1382
+ CREATE VIEW general_ledger AS
1383
+ SELECT *
1384
+ FROM (
1385
+ SELECT
1386
+ treasury_tx_id, b.chain_name, a.timestamp, a.block, a.hash, a.log_index,
1387
+ c.symbol AS token, d.address AS "from", d.nickname as from_nickname,
1388
+ e.address AS "to", e.nickname as to_nickname, a.amount, a.price, a.value_usd,
1389
+ f.name AS txgroup, g.name AS parent_txgroup, f.txgroup_id
1214
1390
  FROM treasury_txs a
1215
1391
  LEFT JOIN chains b ON a.chain = b.chain_dbid
1216
1392
  LEFT JOIN tokens c ON a.token_id = c.token_id
@@ -1219,13 +1395,15 @@ def create_general_ledger_view() -> None:
1219
1395
  LEFT JOIN txgroups f ON a.txgroup_id = f.txgroup_id
1220
1396
  LEFT JOIN txgroups g ON f.parent_txgroup = g.txgroup_id
1221
1397
  UNION
1222
- SELECT -1, chain_name, timestamp, block, hash, log_index, token, "from", from_nickname, "to", to_nickname, amount, price, value_usd, txgroup, parent_txgroup, txgroup_id
1398
+ SELECT
1399
+ -1, chain_name, timestamp, block, hash, log_index, token, "from", from_nickname,
1400
+ "to", to_nickname, amount, price, value_usd, txgroup, parent_txgroup, txgroup_id
1223
1401
  FROM stream_ledger
1224
1402
  --UNION
1225
1403
  --SELECT -1, *
1226
1404
  --FROM vesting_ledger
1227
1405
  ) a
1228
- ORDER BY timestamp
1406
+ ORDER BY timestamp;
1229
1407
  """
1230
1408
  )
1231
1409
 
@@ -1238,14 +1416,14 @@ def create_unsorted_txs_view() -> None:
1238
1416
  Examples:
1239
1417
  >>> create_unsorted_txs_view()
1240
1418
  """
1241
- db.execute("DROP VIEW IF EXISTS unsorted_txs;")
1242
1419
  db.execute(
1243
1420
  """
1244
- CREATE VIEW unsorted_txs as
1421
+ DROP VIEW IF EXISTS unsorted_txs;
1422
+ CREATE VIEW unsorted_txs AS
1245
1423
  SELECT *
1246
1424
  FROM general_ledger
1247
1425
  WHERE txgroup = 'Categorization Pending'
1248
- ORDER BY TIMESTAMP desc
1426
+ ORDER BY timestamp DESC;
1249
1427
  """
1250
1428
  )
1251
1429
 
@@ -1258,53 +1436,147 @@ def create_monthly_pnl_view() -> None:
1258
1436
  Examples:
1259
1437
  >>> create_monthly_pnl_view()
1260
1438
  """
1261
- db.execute("DROP VIEW IF EXISTS monthly_pnl;")
1262
1439
  sql = """
1440
+ DROP VIEW IF EXISTS monthly_pnl;
1263
1441
  CREATE VIEW monthly_pnl AS
1264
1442
  WITH categorized AS (
1265
- SELECT
1266
- strftime('%Y-%m', datetime(t.timestamp, 'unixepoch')) AS month,
1443
+ SELECT
1444
+ to_char(to_timestamp(t.timestamp), 'YYYY-MM') AS month,
1267
1445
  CASE
1268
- WHEN p.name IS NOT NULL THEN p.name
1269
- ELSE tg.name
1446
+ WHEN p.name IS NOT NULL THEN p.name
1447
+ ELSE tg.name
1270
1448
  END AS top_category,
1271
1449
  --COALESCE(t.value_usd, 0) AS value_usd,
1272
1450
  --COALESCE(t.gas_used, 0) * COALESCE(t.gas_price, 0) AS gas_cost
1273
- FROM treasury_txs t
1274
- JOIN txgroups tg ON t.txgroup = tg.txgroup_id
1275
- LEFT JOIN txgroups p ON tg.parent_txgroup = p.txgroup_id
1276
- WHERE tg.name <> 'Ignore'
1451
+ FROM treasury_txs t
1452
+ JOIN txgroups tg ON t.txgroup = tg.txgroup_id
1453
+ LEFT JOIN txgroups p ON tg.parent_txgroup = p.txgroup_id
1454
+ WHERE tg.name <> 'Ignore'
1277
1455
  )
1278
1456
  SELECT
1279
- month,
1280
- SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END) AS revenue,
1281
- SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END) AS cost_of_revenue,
1282
- SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END) AS expenses,
1283
- SUM(CASE WHEN top_category = 'Other Income' THEN value_usd ELSE 0 END) AS other_income,
1284
- SUM(CASE WHEN top_category = 'Other Expenses' THEN value_usd ELSE 0 END) AS other_expense,
1285
- (
1457
+ month,
1458
+ SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END) AS revenue,
1459
+ SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END) AS cost_of_revenue,
1460
+ SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END) AS expenses,
1461
+ SUM(CASE WHEN top_category = 'Other Income' THEN value_usd ELSE 0 END) AS other_income,
1462
+ SUM(CASE WHEN top_category = 'Other Expenses' THEN value_usd ELSE 0 END) AS other_expense,
1463
+ (
1286
1464
  SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END) -
1287
1465
  SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END) -
1288
1466
  SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END) +
1289
1467
  SUM(CASE WHEN top_category = 'Other Income' THEN value_usd ELSE 0 END) -
1290
1468
  SUM(CASE WHEN top_category = 'Other Expenses' THEN value_usd ELSE 0 END)
1291
- ) AS net_profit
1469
+ ) AS net_profit
1292
1470
  FROM categorized
1293
1471
  GROUP BY month;
1294
1472
  """
1295
1473
  db.execute(sql)
1296
1474
 
1297
1475
 
1298
- with db_session:
1299
- create_stream_ledger_view()
1300
- create_txgroup_hierarchy_view()
1301
- # create_vesting_ledger_view()
1302
- create_general_ledger_view()
1303
- create_unsorted_txs_view()
1304
- # create_monthly_pnl_view()
1476
+ def create_usdval_presum_matview() -> None:
1477
+ # This view presums usd value from the general_ledger view,
1478
+ # grouped by timestamp and txgroup
1479
+ db.execute(
1480
+ """
1481
+ DROP MATERIALIZED VIEW IF EXISTS usdvalue_presum;
1482
+ CREATE MATERIALIZED VIEW usdvalue_presum AS
1483
+ SELECT
1484
+ txgroup_id,
1485
+ timestamp,
1486
+ SUM(value_usd) AS value_usd
1487
+ FROM general_ledger
1488
+ GROUP BY txgroup_id, timestamp;
1489
+
1490
+ -- Indexes
1491
+ CREATE UNIQUE INDEX idx_usdvalue_presum_txgroup_id_timestamp
1492
+ ON usdvalue_presum (txgroup_id, timestamp);
1493
+
1494
+ CREATE UNIQUE INDEX idx_usdvalue_presum_timestamp_txgroup_id
1495
+ ON usdvalue_presum (timestamp, txgroup_id);
1496
+ """
1497
+ )
1498
+
1499
+
1500
+ def create_usdval_presum_revenue_matview() -> None:
1501
+ # This view is specifically for the Revenue Over Time dashboard.
1502
+ # It presums usd value for Revenue and Other Income categories only, pre-joining txgroups and top_category.
1503
+ db.execute(
1504
+ """
1505
+ DROP MATERIALIZED VIEW IF EXISTS usdvalue_presum_revenue;
1506
+ CREATE MATERIALIZED VIEW usdvalue_presum_revenue AS
1507
+ SELECT
1508
+ p.txgroup_id,
1509
+ t.name AS txgroup_name,
1510
+ gh.top_category,
1511
+ p.timestamp,
1512
+ SUM(p.value_usd) AS value_usd
1513
+ FROM general_ledger p
1514
+ JOIN txgroup_hierarchy gh ON p.txgroup_id = gh.txgroup_id
1515
+ JOIN txgroups t ON p.txgroup_id = t.txgroup_id
1516
+ WHERE gh.top_category IN ('Revenue', 'Other Income')
1517
+ GROUP BY p.txgroup_id, t.name, gh.top_category, p.timestamp;
1518
+
1519
+ -- Indexes
1520
+ CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_txgroup_id_timestamp
1521
+ ON usdvalue_presum_revenue (txgroup_id, timestamp);
1522
+
1523
+ CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_timestamp_txgroup_id
1524
+ ON usdvalue_presum_revenue (timestamp, txgroup_id);
1525
+
1526
+ CREATE INDEX idx_usdvalue_presum_revenue_txgroup_name_timestamp
1527
+ ON usdvalue_presum_revenue (txgroup_name, timestamp);
1528
+
1529
+ CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_timestamp_txgroup_name
1530
+ ON usdvalue_presum_revenue (timestamp, txgroup_name);
1531
+
1532
+ CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_top_category_txgroup_id_timestamp
1533
+ ON usdvalue_presum_revenue (top_category, txgroup_id, timestamp);
1534
+
1535
+ CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_top_category_txgroup_name_timestamp
1536
+ ON usdvalue_presum_revenue (top_category, txgroup_name, timestamp);
1537
+ """
1538
+ )
1305
1539
 
1306
- must_sort_inbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Inbound)")
1307
- must_sort_outbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Outbound)")
1540
+
1541
+ def create_usdval_presum_expenses_matview() -> None:
1542
+ # This view is specifically for the Expenses Over Time dashboard.
1543
+ # It presums usd value for Expenses, Cost of Revenue, and Other Expense categories only, pre-joining txgroups and top_category
1544
+ db.execute(
1545
+ """
1546
+ DROP MATERIALIZED VIEW IF EXISTS usdvalue_presum_expenses;
1547
+ CREATE MATERIALIZED VIEW usdvalue_presum_expenses AS
1548
+ SELECT
1549
+ p.txgroup_id,
1550
+ g.name AS txgroup_name,
1551
+ gh.top_category,
1552
+ p.timestamp,
1553
+ SUM(p.value_usd) AS value_usd
1554
+ FROM general_ledger p
1555
+ JOIN txgroup_hierarchy gh ON p.txgroup_id = gh.txgroup_id
1556
+ JOIN txgroups g ON p.txgroup_id = g.txgroup_id
1557
+ WHERE gh.top_category IN ('Expenses', 'Cost of Revenue', 'Other Expense')
1558
+ GROUP BY p.txgroup_id, g.name, gh.top_category, p.timestamp;
1559
+
1560
+ -- Indexes
1561
+ CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_txgroup_id_timestamp
1562
+ ON usdvalue_presum_expenses (txgroup_id, timestamp);
1563
+
1564
+ CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_timestamp_txgroup_id
1565
+ ON usdvalue_presum_expenses (timestamp, txgroup_id);
1566
+
1567
+ CREATE INDEX idx_usdvalue_presum_expenses_txgroup_name_timestamp
1568
+ ON usdvalue_presum_expenses (txgroup_name, timestamp);
1569
+
1570
+ CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_timestamp_txgroup_name
1571
+ ON usdvalue_presum_expenses (timestamp, txgroup_name);
1572
+
1573
+ CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_top_category_txgroup_id_timestamp
1574
+ ON usdvalue_presum_expenses (top_category, txgroup_id, timestamp);
1575
+
1576
+ CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_top_category_txgroup_name_timestamp
1577
+ ON usdvalue_presum_expenses (top_category, txgroup_name, timestamp);
1578
+ """
1579
+ )
1308
1580
 
1309
1581
 
1310
1582
  @db_session
@@ -1382,3 +1654,21 @@ def _validate_integrity_error(
1382
1654
  )
1383
1655
  else None
1384
1656
  )
1657
+
1658
+
1659
+ def _drop_shitcoin_txs() -> None:
1660
+ """
1661
+ Purge any shitcoin txs from the db.
1662
+
1663
+ These should not be frequent, and only occur if a user populated the db before a shitcoin was added to the SHITCOINS mapping.
1664
+ """
1665
+ shitcoins = eth_portfolio.SHITCOINS[CHAINID]
1666
+ with db_session:
1667
+ shitcoin_txs = select(
1668
+ tx for tx in TreasuryTx if tx.token.address.address in shitcoins
1669
+ )
1670
+ if count := shitcoin_txs.count():
1671
+ logger.info(f"Purging {count} shitcoin txs from the database...")
1672
+ for tx in shitcoin_txs:
1673
+ tx.delete()
1674
+ logger.info("Shitcoin tx purge complete.")