dao-treasury 0.0.60__cp312-cp312-win32.whl → 0.1.6__cp312-cp312-win32.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dao-treasury has been flagged as potentially problematic; review the release details below before installing.

Files changed (42):
  1. dao_treasury/.grafana/provisioning/dashboards/breakdowns/Expenses.json +195 -154
  2. dao_treasury/.grafana/provisioning/dashboards/breakdowns/Revenue.json +192 -149
  3. dao_treasury/.grafana/provisioning/dashboards/dashboards.yaml +7 -81
  4. dao_treasury/.grafana/provisioning/dashboards/streams/LlamaPay.json +10 -22
  5. dao_treasury/.grafana/provisioning/dashboards/summary/Monthly.json +283 -23
  6. dao_treasury/.grafana/provisioning/dashboards/transactions/Treasury Transactions.json +49 -39
  7. dao_treasury/.grafana/provisioning/dashboards/transactions/Unsorted Transactions.json +367 -0
  8. dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow (Including Unsorted).json +109 -187
  9. dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow.json +77 -127
  10. dao_treasury/.grafana/provisioning/dashboards/treasury/Current Treasury Assets.json +509 -105
  11. dao_treasury/.grafana/provisioning/dashboards/treasury/Historical Treasury Balances.json +3856 -2924
  12. dao_treasury/.grafana/provisioning/dashboards/treasury/Operating Cashflow.json +57 -97
  13. dao_treasury/.grafana/provisioning/datasources/datasources.yaml +9 -4
  14. dao_treasury/__init__.py +0 -4
  15. dao_treasury/_docker.cp312-win32.pyd +0 -0
  16. dao_treasury/_docker.py +30 -22
  17. dao_treasury/_nicknames.cp312-win32.pyd +0 -0
  18. dao_treasury/_nicknames.py +3 -2
  19. dao_treasury/_wallet.cp312-win32.pyd +0 -0
  20. dao_treasury/constants.cp312-win32.pyd +0 -0
  21. dao_treasury/db.py +383 -131
  22. dao_treasury/docker-compose.yaml +19 -3
  23. dao_treasury/main.py +42 -4
  24. dao_treasury/sorting/__init__.cp312-win32.pyd +0 -0
  25. dao_treasury/sorting/__init__.py +38 -21
  26. dao_treasury/sorting/_matchers.cp312-win32.pyd +0 -0
  27. dao_treasury/sorting/_rules.cp312-win32.pyd +0 -0
  28. dao_treasury/sorting/factory.cp312-win32.pyd +0 -0
  29. dao_treasury/sorting/rule.cp312-win32.pyd +0 -0
  30. dao_treasury/sorting/rules/__init__.cp312-win32.pyd +0 -0
  31. dao_treasury/sorting/rules/ignore/__init__.cp312-win32.pyd +0 -0
  32. dao_treasury/sorting/rules/ignore/llamapay.cp312-win32.pyd +0 -0
  33. dao_treasury/treasury.py +4 -2
  34. dao_treasury/types.cp312-win32.pyd +0 -0
  35. {dao_treasury-0.0.60.dist-info → dao_treasury-0.1.6.dist-info}/METADATA +18 -4
  36. dao_treasury-0.1.6.dist-info/RECORD +53 -0
  37. dao_treasury__mypyc.cp312-win32.pyd +0 -0
  38. dao_treasury/streams/__init__.cp312-win32.pyd +0 -0
  39. dao_treasury/streams/llamapay.cp312-win32.pyd +0 -0
  40. dao_treasury-0.0.60.dist-info/RECORD +0 -54
  41. {dao_treasury-0.0.60.dist-info → dao_treasury-0.1.6.dist-info}/WHEEL +0 -0
  42. {dao_treasury-0.0.60.dist-info → dao_treasury-0.1.6.dist-info}/top_level.txt +0 -0
dao_treasury/db.py CHANGED
@@ -16,27 +16,28 @@ resolving integrity conflicts, caching transaction receipts,
16
16
  and creating SQL views for reporting.
17
17
  """
18
18
 
19
+ import os
19
20
  import typing
20
- from asyncio import Semaphore
21
+ from asyncio import Lock, Semaphore
21
22
  from collections import OrderedDict
23
+ from datetime import date, datetime, time, timezone
22
24
  from decimal import Decimal, InvalidOperation
23
25
  from functools import lru_cache
24
26
  from logging import getLogger
25
- from os import path
26
- from pathlib import Path
27
27
  from typing import (
28
28
  TYPE_CHECKING,
29
29
  Any,
30
+ Callable,
30
31
  Coroutine,
31
32
  Dict,
32
33
  Final,
33
34
  Literal,
34
35
  Tuple,
36
+ TypeVar,
35
37
  Union,
36
38
  final,
37
39
  overload,
38
40
  )
39
- from datetime import date, datetime, time, timezone
40
41
 
41
42
  import eth_portfolio
42
43
  from a_sync import AsyncThreadPoolExecutor
@@ -65,8 +66,10 @@ from pony.orm import (
65
66
  composite_key,
66
67
  composite_index,
67
68
  db_session,
69
+ rollback,
68
70
  select,
69
71
  )
72
+ from typing_extensions import ParamSpec
70
73
  from y import EEE_ADDRESS, Contract, Network, convert, get_block_timestamp_async
71
74
  from y._db.decorators import retry_locked
72
75
  from y.contracts import _get_code
@@ -76,14 +79,18 @@ from dao_treasury.constants import CHAINID
76
79
  from dao_treasury.types import TxGroupDbid, TxGroupName
77
80
 
78
81
 
79
- EventItem = _EventItem[_EventItem[OrderedDict[str, Any]]]
80
-
82
+ _T = TypeVar("_T")
83
+ _P = ParamSpec("_P")
81
84
 
82
- SQLITE_DIR = Path(path.expanduser("~")) / ".dao-treasury"
83
- """Path to the directory in the user's home where the DAO treasury SQLite database is stored."""
85
+ EventItem = _EventItem[_EventItem[OrderedDict[str, Any]]]
84
86
 
85
- SQLITE_DIR.mkdir(parents=True, exist_ok=True)
86
87
 
88
+ # Postgres connection parameters from environment variables (with docker-compose defaults)
89
+ POSTGRES_USER = os.getenv("DAO_TREASURY_DB_USER", "dao_treasury")
90
+ POSTGRES_PASSWORD = os.getenv("DAO_TREASURY_DB_PASSWORD", "dao_treasury")
91
+ POSTGRES_DB = os.getenv("DAO_TREASURY_DB_NAME", "dao_treasury")
92
+ POSTGRES_HOST = os.getenv("DAO_TREASURY_DB_HOST", "127.0.0.1")
93
+ POSTGRES_PORT = int(os.getenv("DAO_TREASURY_DB_PORT", "8675"))
87
94
 
88
95
  _INSERT_THREAD = AsyncThreadPoolExecutor(1)
89
96
  _SORT_THREAD = AsyncThreadPoolExecutor(1)
@@ -94,6 +101,12 @@ _UTC = timezone.utc
94
101
 
95
102
  db = Database()
96
103
 
104
+ db_ready: bool = False
105
+ startup_lock: Final = Lock()
106
+
107
+ must_sort_inbound_txgroup_dbid: TxGroupDbid = None
108
+ must_sort_outbound_txgroup_dbid: TxGroupDbid = None
109
+
97
110
  logger = getLogger("dao_treasury.db")
98
111
 
99
112
 
@@ -210,7 +223,7 @@ class Address(DbEntity):
210
223
  address = Required(str, index=True)
211
224
  """Checksum string of the on-chain address."""
212
225
 
213
- nickname = Optional(str)
226
+ nickname = Optional(str, index=True)
214
227
  """Optional human-readable label (e.g., contract name or token name)."""
215
228
 
216
229
  is_contract = Required(bool, index=True, lazy=True)
@@ -396,7 +409,7 @@ class Token(DbEntity):
396
409
  symbol = Required(str, index=True, lazy=True)
397
410
  """Short ticker symbol for the token."""
398
411
 
399
- name = Required(str, lazy=True)
412
+ name = Required(str, lazy=True, index=True)
400
413
  """Full human-readable name of the token."""
401
414
 
402
415
  decimals = Required(int, lazy=True)
@@ -409,12 +422,15 @@ class Token(DbEntity):
409
422
  """Inverse relation for treasury transactions involving this token."""
410
423
  # partner_harvest_event = Set('PartnerHarvestEvent', reverse="vault", lazy=True)
411
424
 
412
- address = Required(Address, column="address_id")
425
+ address = Required(Address, column="address_id", unique=True)
413
426
  """Foreign key to the address record for this token contract."""
414
427
 
415
428
  streams = Set("Stream", reverse="token", lazy=True)
416
429
  # vesting_escrows = Set("VestingEscrow", reverse="token", lazy=True)
417
430
 
431
+ composite_index(chain, name)
432
+ composite_index(chain, symbol)
433
+
418
434
  def __eq__(self, other: Union["Token", Address, ChecksumAddress]) -> bool: # type: ignore [override]
419
435
  if isinstance(other, str):
420
436
  return self.address == other
@@ -557,13 +573,13 @@ class TxGroup(DbEntity):
557
573
  txgroup_id = PrimaryKey(int, auto=True)
558
574
  """Auto-incremented primary key for transaction groups."""
559
575
 
560
- name = Required(str)
576
+ name = Required(str, index=True)
561
577
  """Name of the grouping category, e.g., 'Revenue', 'Expenses'."""
562
578
 
563
579
  treasury_tx = Set("TreasuryTx", reverse="txgroup", lazy=True)
564
580
  """Inverse relation for treasury transactions assigned to this group."""
565
581
 
566
- parent_txgroup = Optional("TxGroup", reverse="child_txgroups")
582
+ parent_txgroup = Optional("TxGroup", reverse="child_txgroups", index=True)
567
583
  """Optional reference to a parent group for nesting."""
568
584
 
569
585
  composite_key(name, parent_txgroup)
@@ -647,6 +663,8 @@ class TxGroup(DbEntity):
647
663
  if txgroup := TxGroup.get(name=name, parent_txgroup=parent):
648
664
  return txgroup # type: ignore [no-any-return]
649
665
  raise Exception(e, name, parent) from e
666
+ else:
667
+ db.execute("REFRESH MATERIALIZED VIEW txgroup_hierarchy;")
650
668
  return txgroup # type: ignore [no-any-return]
651
669
 
652
670
 
@@ -733,6 +751,25 @@ class TreasuryTx(DbEntity):
733
751
  """Foreign key to the categorization group."""
734
752
 
735
753
  composite_index(chain, txgroup)
754
+ composite_index(chain, token)
755
+ composite_index(chain, from_address)
756
+ composite_index(chain, to_address)
757
+ composite_index(chain, from_address, to_address)
758
+ composite_index(timestamp, txgroup)
759
+ composite_index(timestamp, token)
760
+ composite_index(timestamp, from_address)
761
+ composite_index(timestamp, to_address)
762
+ composite_index(timestamp, from_address, to_address)
763
+ composite_index(timestamp, chain, txgroup)
764
+ composite_index(timestamp, chain, token)
765
+ composite_index(timestamp, chain, from_address)
766
+ composite_index(timestamp, chain, to_address)
767
+ composite_index(timestamp, chain, from_address, to_address)
768
+ composite_index(chain, timestamp, txgroup)
769
+ composite_index(chain, timestamp, token)
770
+ composite_index(chain, timestamp, from_address)
771
+ composite_index(chain, timestamp, to_address)
772
+ composite_index(chain, timestamp, from_address, to_address)
736
773
 
737
774
  @property
738
775
  def to_nickname(self) -> typing.Optional[str]:
@@ -746,6 +783,10 @@ class TreasuryTx(DbEntity):
746
783
  """Human-readable label for the sender address."""
747
784
  return self.from_address.nickname or self.from_address.address # type: ignore [union-attr]
748
785
 
786
+ @property
787
+ def token_address(self) -> ChecksumAddress:
788
+ return self.token.address.address
789
+
749
790
  @property
750
791
  def symbol(self) -> str:
751
792
  """Ticker symbol for the transferred token."""
@@ -960,6 +1001,10 @@ class TreasuryTx(DbEntity):
960
1001
  must_sort_inbound_txgroup_dbid,
961
1002
  must_sort_outbound_txgroup_dbid,
962
1003
  ):
1004
+ with db_session:
1005
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum;")
1006
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_revenue;")
1007
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_expenses;")
963
1008
  logger.info(
964
1009
  "Sorted %s to %s", entry, TxGroup.get_fullname(txgroup_dbid)
965
1010
  )
@@ -972,25 +1017,42 @@ class TreasuryTx(DbEntity):
972
1017
  with db_session:
973
1018
  TreasuryTx[treasury_tx_dbid].txgroup = txgroup_dbid
974
1019
  commit()
1020
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum;")
1021
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_revenue;")
1022
+ db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_expenses;")
975
1023
 
976
1024
 
977
1025
  _stream_metadata_cache: Final[Dict[HexStr, Tuple[ChecksumAddress, date]]] = {}
978
1026
 
979
1027
 
1028
+ def refresh_matview(name: str) -> Callable[[Callable[_P, _T]], Callable[_P, _T]]:
1029
+ def matview_deco(fn: Callable[_P, _T]) -> Callable[_P, _T]:
1030
+ def matview_refresh_wrap(*args: _P.args, **kwargs: _P.kwargs) -> _T:
1031
+ retval = fn(*args, **kwargs)
1032
+ commit()
1033
+ db.execute(f"REFRESH MATERIALIZED VIEW {name};")
1034
+ commit()
1035
+ return retval
1036
+
1037
+ return matview_refresh_wrap
1038
+
1039
+ return matview_deco
1040
+
1041
+
980
1042
  class Stream(DbEntity):
981
1043
  _table_ = "streams"
982
1044
  stream_id = PrimaryKey(str)
983
1045
 
984
- contract = Required("Address", reverse="streams")
985
- start_block = Required(int)
986
- end_block = Optional(int)
1046
+ contract = Required("Address", reverse="streams", index=True)
1047
+ start_block = Required(int, index=True)
1048
+ end_block = Optional(int, index=True)
987
1049
  token = Required("Token", reverse="streams", index=True)
988
- from_address = Required("Address", reverse="streams_from")
989
- to_address = Required("Address", reverse="streams_to")
990
- reason = Optional(str)
1050
+ from_address = Required("Address", reverse="streams_from", index=True)
1051
+ to_address = Required("Address", reverse="streams_to", index=True)
1052
+ reason = Optional(str, index=True)
991
1053
  amount_per_second = Required(Decimal, 38, 1)
992
- status = Required(str, default="Active")
993
- txgroup = Optional("TxGroup", reverse="streams")
1054
+ status = Required(str, default="Active", index=True)
1055
+ txgroup = Optional("TxGroup", reverse="streams", index=True)
994
1056
 
995
1057
  streamed_funds = Set("StreamedFunds", lazy=True)
996
1058
 
@@ -1032,10 +1094,12 @@ class Stream(DbEntity):
1032
1094
  end = datetime.fromtimestamp(chain[stream.end_block].timestamp, tz=_UTC)
1033
1095
  return start, end
1034
1096
 
1097
+ @refresh_matview("stream_ledger")
1035
1098
  def stop_stream(self, block: int) -> None:
1036
1099
  self.end_block = block
1037
1100
  self.status = "Stopped"
1038
1101
 
1102
+ @refresh_matview("stream_ledger")
1039
1103
  def pause(self) -> None:
1040
1104
  self.status = "Paused"
1041
1105
 
@@ -1095,6 +1159,7 @@ class StreamedFunds(DbEntity):
1095
1159
 
1096
1160
  @classmethod
1097
1161
  @db_session
1162
+ @refresh_matview("stream_ledger")
1098
1163
  def create_entity(
1099
1164
  cls,
1100
1165
  stream_id: str,
@@ -1119,23 +1184,56 @@ class StreamedFunds(DbEntity):
1119
1184
  return entity
1120
1185
 
1121
1186
 
1122
- db.bind(
1123
- provider="sqlite", # TODO: let user choose postgres with server connection params
1124
- filename=str(SQLITE_DIR / "dao-treasury.sqlite"),
1125
- create_db=True,
1126
- )
1187
+ def init_db() -> None:
1188
+ """Initialize the database if not yet initialized."""
1189
+ global db_ready
1190
+ if db_ready:
1191
+ return
1192
+
1193
+ db.bind(
1194
+ provider="postgres",
1195
+ user=POSTGRES_USER,
1196
+ password=POSTGRES_PASSWORD,
1197
+ host=POSTGRES_HOST,
1198
+ port=POSTGRES_PORT,
1199
+ database=POSTGRES_DB,
1200
+ )
1201
+
1202
+ db.generate_mapping(create_tables=True)
1203
+
1204
+ with db_session:
1205
+ create_stream_ledger_matview()
1206
+ create_txgroup_hierarchy_matview()
1207
+ # create_vesting_ledger_view()
1208
+ create_general_ledger_view()
1209
+ create_unsorted_txs_view()
1210
+ create_usdval_presum_matview()
1211
+
1212
+ # depends on usdvalue_presum
1213
+ create_monthly_pnl_view()
1214
+ create_usdval_presum_revenue_matview()
1215
+ create_usdval_presum_expenses_matview()
1216
+
1217
+ global must_sort_inbound_txgroup_dbid
1218
+ must_sort_inbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Inbound)")
1127
1219
 
1128
- db.generate_mapping(create_tables=True)
1220
+ global must_sort_outbound_txgroup_dbid
1221
+ must_sort_outbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Outbound)")
1222
+
1223
+ _drop_shitcoin_txs()
1129
1224
 
1225
+ db_ready = True
1130
1226
 
1131
- def _set_address_nicknames_for_tokens() -> None:
1227
+
1228
+ def set_address_nicknames_for_tokens() -> None:
1132
1229
  """Set address.nickname for addresses belonging to tokens."""
1230
+ init_db()
1133
1231
  for address in select(a for a in Address if a.token and not a.nickname):
1134
1232
  address.nickname = f"Token: {address.token.name}"
1135
1233
  db.commit()
1136
1234
 
1137
1235
 
1138
- def create_stream_ledger_view() -> None:
1236
+ def create_stream_ledger_matview() -> None:
1139
1237
  """Create or replace the SQL view `stream_ledger` for streamed funds reporting.
1140
1238
 
1141
1239
  This view joins streamed funds, streams, tokens, addresses, and txgroups
@@ -1144,60 +1242,90 @@ def create_stream_ledger_view() -> None:
1144
1242
  Examples:
1145
1243
  >>> create_stream_ledger_view()
1146
1244
  """
1147
- db.execute("""DROP VIEW IF EXISTS stream_ledger;""")
1148
- db.execute(
1149
- """
1150
- create view stream_ledger as
1151
- SELECT 'Mainnet' as chain_name,
1152
- cast(strftime('%s', date || ' 00:00:00') as INTEGER) as timestamp,
1153
- NULL as block,
1154
- NULL as hash,
1155
- NULL as log_index,
1156
- symbol as token,
1157
- d.address AS "from",
1158
- d.nickname as from_nickname,
1159
- e.address AS "to",
1160
- e.nickname as to_nickname,
1161
- amount,
1162
- price,
1163
- value_usd,
1164
- txgroup.name as txgroup,
1165
- parent.name as parent_txgroup,
1245
+ try:
1246
+ db.execute(
1247
+ """
1248
+ DROP MATERIALIZED VIEW IF EXISTS stream_ledger CASCADE;
1249
+ CREATE MATERIALIZED VIEW stream_ledger AS
1250
+ SELECT
1251
+ 'Mainnet' as chain_name,
1252
+ EXTRACT(EPOCH FROM (date::date))::integer as timestamp,
1253
+ CAST(NULL as integer) as block,
1254
+ NULL as hash,
1255
+ CAST(NULL as integer) as log_index,
1256
+ symbol as token,
1257
+ d.address AS "from",
1258
+ d.nickname as from_nickname,
1259
+ e.address AS "to",
1260
+ e.nickname as to_nickname,
1261
+ amount,
1262
+ price,
1263
+ value_usd,
1264
+ txgroup.name as txgroup,
1265
+ parent.name as parent_txgroup,
1166
1266
  txgroup.txgroup_id
1167
- FROM streamed_funds a
1168
- LEFT JOIN streams b ON a.stream = b.stream_id
1169
- LEFT JOIN tokens c ON b.token = c.token_id
1170
- LEFT JOIN addresses d ON b.from_address = d.address_id
1171
- LEFT JOIN addresses e ON b.to_address = e.address_id
1172
- LEFT JOIN txgroups txgroup ON b.txgroup = txgroup.txgroup_id
1173
- LEFT JOIN txgroups parent ON txgroup.parent_txgroup = parent.txgroup_id
1174
- """
1175
- )
1267
+ FROM streamed_funds a
1268
+ LEFT JOIN streams b ON a.stream = b.stream_id
1269
+ LEFT JOIN tokens c ON b.token = c.token_id
1270
+ LEFT JOIN addresses d ON b.from_address = d.address_id
1271
+ LEFT JOIN addresses e ON b.to_address = e.address_id
1272
+ LEFT JOIN txgroups txgroup ON b.txgroup = txgroup.txgroup_id
1273
+ LEFT JOIN txgroups parent ON txgroup.parent_txgroup = parent.txgroup_id;
1274
+
1275
+ """
1276
+ )
1277
+ except Exception as e:
1278
+ if '"stream_ledger" is not a materialized view' not in str(e):
1279
+ raise
1280
+ # we're running an old schema, lets migrate it
1281
+ rollback()
1282
+ db.execute("DROP VIEW IF EXISTS stream_ledger CASCADE;")
1283
+ commit()
1284
+ create_stream_ledger_matview()
1176
1285
 
1177
1286
 
1178
- def create_txgroup_hierarchy_view() -> None:
1287
+ def create_txgroup_hierarchy_matview() -> None:
1179
1288
  """Create or replace the SQL view `txgroup_hierarchy` for recursive txgroup hierarchy.
1180
1289
 
1181
1290
  This view exposes txgroup_id, top_category, and parent_txgroup for all txgroups,
1182
1291
  matching the recursive CTE logic used in dashboards.
1183
1292
  """
1184
- db.execute("DROP VIEW IF EXISTS txgroup_hierarchy;")
1185
- db.execute(
1186
- """
1187
- CREATE VIEW txgroup_hierarchy AS
1188
- WITH RECURSIVE group_hierarchy (txgroup_id, top_category, parent_txgroup) AS (
1189
- SELECT txgroup_id, name AS top_category, parent_txgroup
1190
- FROM txgroups
1191
- WHERE parent_txgroup IS NULL
1192
- UNION ALL
1193
- SELECT child.txgroup_id, parent.top_category, child.parent_txgroup
1194
- FROM txgroups AS child
1195
- JOIN group_hierarchy AS parent
1196
- ON child.parent_txgroup = parent.txgroup_id
1293
+ try:
1294
+ db.execute(
1295
+ """
1296
+ DROP MATERIALIZED VIEW IF EXISTS txgroup_hierarchy CASCADE;
1297
+ CREATE MATERIALIZED VIEW txgroup_hierarchy AS
1298
+ WITH RECURSIVE group_hierarchy (txgroup_id, top_category, parent_txgroup) AS (
1299
+ SELECT txgroup_id, name AS top_category, parent_txgroup
1300
+ FROM txgroups
1301
+ WHERE parent_txgroup IS NULL
1302
+ UNION ALL
1303
+ SELECT child.txgroup_id, parent.top_category, child.parent_txgroup
1304
+ FROM txgroups AS child
1305
+ JOIN group_hierarchy AS parent
1306
+ ON child.parent_txgroup = parent.txgroup_id
1307
+ )
1308
+ SELECT * FROM group_hierarchy;
1309
+
1310
+ -- Indexes
1311
+ CREATE UNIQUE INDEX idx_txgroup_hierarchy_txgroup_id
1312
+ ON txgroup_hierarchy (txgroup_id);
1313
+
1314
+ CREATE INDEX idx_txgroup_hierarchy_top_category
1315
+ ON txgroup_hierarchy (top_category);
1316
+
1317
+ CREATE INDEX idx_txgroup_hierarchy_parent
1318
+ ON txgroup_hierarchy (parent_txgroup);
1319
+ """
1197
1320
  )
1198
- SELECT * FROM group_hierarchy;
1199
- """
1200
- )
1321
+ except Exception as e:
1322
+ if '"txgroup_hierarchy" is not a materialized view' not in str(e):
1323
+ raise
1324
+ # we're running an old schema, lets migrate it
1325
+ rollback()
1326
+ db.execute("DROP VIEW IF EXISTS txgroup_hierarchy CASCADE;")
1327
+ commit()
1328
+ create_txgroup_hierarchy_matview()
1201
1329
 
1202
1330
 
1203
1331
  def create_vesting_ledger_view() -> None:
@@ -1213,11 +1341,12 @@ def create_vesting_ledger_view() -> None:
1213
1341
  """
1214
1342
  DROP VIEW IF EXISTS vesting_ledger;
1215
1343
  CREATE VIEW vesting_ledger AS
1216
- SELECT d.chain_name,
1217
- CAST(date AS timestamp) AS "timestamp",
1218
- cast(NULL as int) AS block,
1344
+ SELECT
1345
+ d.chain_name,
1346
+ date::timestamp AS "timestamp",
1347
+ CAST(NULL as integer) AS block,
1219
1348
  NULL AS "hash",
1220
- cast(NULL as int) AS "log_index",
1349
+ CAST(NULL as integer) AS "log_index",
1221
1350
  c.symbol AS "token",
1222
1351
  e.address AS "from",
1223
1352
  e.nickname as from_nickname,
@@ -1229,14 +1358,14 @@ def create_vesting_ledger_view() -> None:
1229
1358
  g.name as txgroup,
1230
1359
  h.name AS parent_txgroup,
1231
1360
  g.txgroup_id
1232
- FROM vested_funds a
1361
+ FROM vested_funds a
1233
1362
  LEFT JOIN vesting_escrows b ON a.escrow = b.escrow_id
1234
1363
  LEFT JOIN tokens c ON b.token = c.token_id
1235
1364
  LEFT JOIN chains d ON c.chain = d.chain_dbid
1236
1365
  LEFT JOIN addresses e ON b.address = e.address_id
1237
1366
  LEFT JOIN addresses f ON b.recipient = f.address_id
1238
1367
  LEFT JOIN txgroups g ON b.txgroup = g.txgroup_id
1239
- left JOIN txgroups h ON g.parent_txgroup = h.txgroup_id
1368
+ LEFT JOIN txgroups h ON g.parent_txgroup = h.txgroup_id;
1240
1369
  """
1241
1370
  )
1242
1371
 
@@ -1249,13 +1378,17 @@ def create_general_ledger_view() -> None:
1249
1378
  Examples:
1250
1379
  >>> create_general_ledger_view()
1251
1380
  """
1252
- db.execute("drop VIEW IF EXISTS general_ledger")
1253
1381
  db.execute(
1254
1382
  """
1255
- create VIEW general_ledger as
1256
- select *
1257
- from (
1258
- SELECT treasury_tx_id, b.chain_name, a.timestamp, a.block, a.hash, a.log_index, c.symbol AS token, d.address AS "from", d.nickname as from_nickname, e.address AS "to", e.nickname as to_nickname, a.amount, a.price, a.value_usd, f.name AS txgroup, g.name AS parent_txgroup, f.txgroup_id
1383
+ DROP VIEW IF EXISTS general_ledger;
1384
+ CREATE VIEW general_ledger AS
1385
+ SELECT *
1386
+ FROM (
1387
+ SELECT
1388
+ treasury_tx_id, b.chain_name, a.timestamp, a.block, a.hash, a.log_index,
1389
+ c.symbol AS token, d.address AS "from", d.nickname as from_nickname,
1390
+ e.address AS "to", e.nickname as to_nickname, a.amount, a.price, a.value_usd,
1391
+ f.name AS txgroup, g.name AS parent_txgroup, f.txgroup_id
1259
1392
  FROM treasury_txs a
1260
1393
  LEFT JOIN chains b ON a.chain = b.chain_dbid
1261
1394
  LEFT JOIN tokens c ON a.token_id = c.token_id
@@ -1264,13 +1397,15 @@ def create_general_ledger_view() -> None:
1264
1397
  LEFT JOIN txgroups f ON a.txgroup_id = f.txgroup_id
1265
1398
  LEFT JOIN txgroups g ON f.parent_txgroup = g.txgroup_id
1266
1399
  UNION
1267
- SELECT -1, chain_name, timestamp, block, hash, log_index, token, "from", from_nickname, "to", to_nickname, amount, price, value_usd, txgroup, parent_txgroup, txgroup_id
1400
+ SELECT
1401
+ -1, chain_name, timestamp, block, hash, log_index, token, "from", from_nickname,
1402
+ "to", to_nickname, amount, price, value_usd, txgroup, parent_txgroup, txgroup_id
1268
1403
  FROM stream_ledger
1269
1404
  --UNION
1270
1405
  --SELECT -1, *
1271
1406
  --FROM vesting_ledger
1272
1407
  ) a
1273
- ORDER BY timestamp
1408
+ ORDER BY timestamp;
1274
1409
  """
1275
1410
  )
1276
1411
 
@@ -1283,14 +1418,14 @@ def create_unsorted_txs_view() -> None:
1283
1418
  Examples:
1284
1419
  >>> create_unsorted_txs_view()
1285
1420
  """
1286
- db.execute("DROP VIEW IF EXISTS unsorted_txs;")
1287
1421
  db.execute(
1288
1422
  """
1289
- CREATE VIEW unsorted_txs as
1423
+ DROP VIEW IF EXISTS unsorted_txs;
1424
+ CREATE VIEW unsorted_txs AS
1290
1425
  SELECT *
1291
1426
  FROM general_ledger
1292
1427
  WHERE txgroup = 'Categorization Pending'
1293
- ORDER BY TIMESTAMP desc
1428
+ ORDER BY timestamp DESC;
1294
1429
  """
1295
1430
  )
1296
1431
 
@@ -1303,53 +1438,173 @@ def create_monthly_pnl_view() -> None:
1303
1438
  Examples:
1304
1439
  >>> create_monthly_pnl_view()
1305
1440
  """
1306
- db.execute("DROP VIEW IF EXISTS monthly_pnl;")
1307
1441
  sql = """
1442
+ DROP VIEW IF EXISTS monthly_pnl;
1308
1443
  CREATE VIEW monthly_pnl AS
1309
- WITH categorized AS (
1310
- SELECT
1311
- strftime('%Y-%m', datetime(t.timestamp, 'unixepoch')) AS month,
1312
- CASE
1313
- WHEN p.name IS NOT NULL THEN p.name
1314
- ELSE tg.name
1315
- END AS top_category,
1316
- --COALESCE(t.value_usd, 0) AS value_usd,
1317
- --COALESCE(t.gas_used, 0) * COALESCE(t.gas_price, 0) AS gas_cost
1318
- FROM treasury_txs t
1319
- JOIN txgroups tg ON t.txgroup = tg.txgroup_id
1320
- LEFT JOIN txgroups p ON tg.parent_txgroup = p.txgroup_id
1321
- WHERE tg.name <> 'Ignore'
1444
+ WITH monthly AS (
1445
+ SELECT
1446
+ to_char(to_timestamp(timestamp), 'YYYY-MM') AS month,
1447
+ top_category,
1448
+ SUM(value_usd) AS value_usd
1449
+ FROM usdvalue_presum
1450
+ WHERE top_category <> 'Ignore'
1451
+ GROUP BY month, top_category
1322
1452
  )
1323
1453
  SELECT
1324
- month,
1325
- SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END) AS revenue,
1326
- SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END) AS cost_of_revenue,
1327
- SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END) AS expenses,
1328
- SUM(CASE WHEN top_category = 'Other Income' THEN value_usd ELSE 0 END) AS other_income,
1329
- SUM(CASE WHEN top_category = 'Other Expenses' THEN value_usd ELSE 0 END) AS other_expense,
1330
- (
1331
- SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END) -
1332
- SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END) -
1333
- SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END) +
1334
- SUM(CASE WHEN top_category = 'Other Income' THEN value_usd ELSE 0 END) -
1335
- SUM(CASE WHEN top_category = 'Other Expenses' THEN value_usd ELSE 0 END)
1336
- ) AS net_profit
1337
- FROM categorized
1454
+ month AS "Month",
1455
+ SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END) AS "Revenue",
1456
+ SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END) AS "Cost of Revenue",
1457
+ SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END) AS "Expenses",
1458
+ (
1459
+ SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END)
1460
+ - SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END)
1461
+ - SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END)
1462
+ ) AS "Operating Net",
1463
+ SUM(CASE WHEN top_category = 'Other Income' THEN value_usd ELSE 0 END) AS "Other Income",
1464
+ SUM(CASE WHEN top_category = 'Other Expenses' THEN value_usd ELSE 0 END) AS "Other Expenses",
1465
+ (
1466
+ SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END)
1467
+ - SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END)
1468
+ - SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END)
1469
+ + SUM(CASE WHEN top_category = 'Other Income' THEN value_usd ELSE 0 END)
1470
+ - SUM(CASE WHEN top_category = 'Other Expenses' THEN value_usd ELSE 0 END)
1471
+ ) AS "Sorted Net",
1472
+ SUM(CASE WHEN top_category = 'Sort Me (Inbound)' THEN value_usd ELSE 0 END) AS "Unsorted Income",
1473
+ SUM(CASE WHEN top_category = 'Sort Me (Outbound)' THEN value_usd ELSE 0 END) AS "Unsorted Expenses",
1474
+ (
1475
+ SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END)
1476
+ - SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END)
1477
+ - SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END)
1478
+ + SUM(CASE WHEN top_category = 'Other Income' THEN value_usd ELSE 0 END)
1479
+ - SUM(CASE WHEN top_category = 'Other Expenses' THEN value_usd ELSE 0 END)
1480
+ + SUM(CASE WHEN top_category = 'Sort Me (Inbound)' THEN value_usd ELSE 0 END)
1481
+ - SUM(CASE WHEN top_category = 'Sort Me (Outbound)' THEN value_usd ELSE 0 END)
1482
+ ) AS "Net",
1483
+ CAST(EXTRACT(EPOCH FROM (to_date(month || '-01', 'YYYY-MM-DD'))) * 1000 AS BIGINT) AS "month_start",
1484
+ CAST(EXTRACT(EPOCH FROM (to_date(month || '-01', 'YYYY-MM-DD') + INTERVAL '1 month' - INTERVAL '1 millisecond')) * 1000 AS BIGINT) AS "month_end"
1485
+ FROM monthly
1338
1486
  GROUP BY month;
1339
1487
  """
1340
1488
  db.execute(sql)
1341
1489
 
1342
1490
 
1343
- with db_session:
1344
- create_stream_ledger_view()
1345
- create_txgroup_hierarchy_view()
1346
- # create_vesting_ledger_view()
1347
- create_general_ledger_view()
1348
- create_unsorted_txs_view()
1349
- # create_monthly_pnl_view()
1491
+ def create_usdval_presum_matview() -> None:
1492
+ # This view presums usd value from the general_ledger view,
1493
+ # grouped by timestamp and txgroup
1494
+ db.execute(
1495
+ """
1496
+ DROP MATERIALIZED VIEW IF EXISTS usdvalue_presum;
1497
+ CREATE MATERIALIZED VIEW usdvalue_presum AS
1498
+ SELECT
1499
+ gl.txgroup_id,
1500
+ gh.top_category,
1501
+ gl.timestamp,
1502
+ SUM(value_usd) AS value_usd
1503
+ FROM general_ledger gl
1504
+ JOIN txgroup_hierarchy gh USING (txgroup_id)
1505
+ GROUP BY gl.txgroup_id, gh.top_category, gl.timestamp;
1506
+
1507
+ -- Indexes
1508
+ CREATE UNIQUE INDEX idx_usdvalue_presum_txgroup_id_timestamp
1509
+ ON usdvalue_presum (txgroup_id, timestamp);
1510
+
1511
+ CREATE UNIQUE INDEX idx_usdvalue_presum_timestamp_txgroup_id
1512
+ ON usdvalue_presum (timestamp, txgroup_id);
1513
+
1514
+ CREATE INDEX idx_usdvalue_presum_top_category_timestamp
1515
+ ON usdvalue_presum (top_category, timestamp);
1516
+
1517
+ CREATE INDEX idx_usdvalue_presum_timestamp_top_category
1518
+ ON usdvalue_presum (timestamp, top_category);
1519
+
1520
+ CREATE UNIQUE INDEX idx_usdvalue_presum_top_category_txgroup_id_timestamp
1521
+ ON usdvalue_presum (top_category, txgroup_id, timestamp);
1522
+
1523
+ CREATE UNIQUE INDEX idx_usdvalue_presum_timestamp_top_category_txgroup_id
1524
+ ON usdvalue_presum (timestamp, top_category, txgroup_id);
1525
+ """
1526
+ )
1350
1527
 
1351
- must_sort_inbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Inbound)")
1352
- must_sort_outbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Outbound)")
1528
+
1529
+ def create_usdval_presum_revenue_matview() -> None:
1530
+ # This view is specifically for the Revenue Over Time dashboard.
1531
+ # It presums usd value for Revenue and Other Income categories only, pre-joining txgroups and top_category.
1532
+ db.execute(
1533
+ """
1534
+ DROP MATERIALIZED VIEW IF EXISTS usdvalue_presum_revenue;
1535
+ CREATE MATERIALIZED VIEW usdvalue_presum_revenue AS
1536
+ SELECT
1537
+ p.txgroup_id,
1538
+ t.name AS txgroup_name,
1539
+ p.top_category,
1540
+ p.timestamp,
1541
+ SUM(p.value_usd) AS value_usd
1542
+ FROM usdvalue_presum p
1543
+ JOIN txgroups t ON p.txgroup_id = t.txgroup_id
1544
+ WHERE p.top_category IN ('Revenue', 'Other Income')
1545
+ GROUP BY p.txgroup_id, t.name, p.top_category, p.timestamp;
1546
+
1547
+ -- Indexes
1548
+ CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_txgroup_id_timestamp
1549
+ ON usdvalue_presum_revenue (txgroup_id, timestamp);
1550
+
1551
+ CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_timestamp_txgroup_id
1552
+ ON usdvalue_presum_revenue (timestamp, txgroup_id);
1553
+
1554
+ CREATE INDEX idx_usdvalue_presum_revenue_txgroup_name_timestamp
1555
+ ON usdvalue_presum_revenue (txgroup_name, timestamp);
1556
+
1557
+ CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_timestamp_txgroup_name
1558
+ ON usdvalue_presum_revenue (timestamp, txgroup_name);
1559
+
1560
+ CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_top_category_txgroup_id_timestamp
1561
+ ON usdvalue_presum_revenue (top_category, txgroup_id, timestamp);
1562
+
1563
+ CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_top_category_txgroup_name_timestamp
1564
+ ON usdvalue_presum_revenue (top_category, txgroup_name, timestamp);
1565
+ """
1566
+ )
1567
+
1568
+
1569
+ def create_usdval_presum_expenses_matview() -> None:
1570
+ # This view is specifically for the Expenses Over Time dashboard.
1571
+ # It presums usd value for Expenses, Cost of Revenue, and Other Expense categories only, pre-joining txgroups and top_category
1572
+ db.execute(
1573
+ """
1574
+ DROP MATERIALIZED VIEW IF EXISTS usdvalue_presum_expenses;
1575
+ CREATE MATERIALIZED VIEW usdvalue_presum_expenses AS
1576
+ SELECT
1577
+ p.txgroup_id,
1578
+ g.name AS txgroup_name,
1579
+ p.top_category,
1580
+ p.timestamp,
1581
+ SUM(p.value_usd) AS value_usd
1582
+ FROM usdvalue_presum p
1583
+ JOIN txgroup_hierarchy gh ON p.txgroup_id = gh.txgroup_id
1584
+ JOIN txgroups g ON p.txgroup_id = g.txgroup_id
1585
+ WHERE p.top_category IN ('Expenses', 'Cost of Revenue', 'Other Expense')
1586
+ GROUP BY p.txgroup_id, g.name, p.top_category, p.timestamp;
1587
+
1588
+ -- Indexes
1589
+ CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_txgroup_id_timestamp
1590
+ ON usdvalue_presum_expenses (txgroup_id, timestamp);
1591
+
1592
+ CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_timestamp_txgroup_id
1593
+ ON usdvalue_presum_expenses (timestamp, txgroup_id);
1594
+
1595
+ CREATE INDEX idx_usdvalue_presum_expenses_txgroup_name_timestamp
1596
+ ON usdvalue_presum_expenses (txgroup_name, timestamp);
1597
+
1598
+ CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_timestamp_txgroup_name
1599
+ ON usdvalue_presum_expenses (timestamp, txgroup_name);
1600
+
1601
+ CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_top_category_txgroup_id_timestamp
1602
+ ON usdvalue_presum_expenses (top_category, txgroup_id, timestamp);
1603
+
1604
+ CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_top_category_txgroup_name_timestamp
1605
+ ON usdvalue_presum_expenses (top_category, txgroup_name, timestamp);
1606
+ """
1607
+ )
1353
1608
 
1354
1609
 
1355
1610
  @db_session
@@ -1445,6 +1700,3 @@ def _drop_shitcoin_txs() -> None:
1445
1700
  for tx in shitcoin_txs:
1446
1701
  tx.delete()
1447
1702
  logger.info("Shitcoin tx purge complete.")
1448
-
1449
-
1450
- _drop_shitcoin_txs()