dao-treasury 0.0.71__cp311-cp311-win_amd64.whl → 0.1.2__cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dao-treasury might be problematic.

Files changed (40)
  1. dao_treasury/.grafana/provisioning/dashboards/breakdowns/Expenses.json +89 -73
  2. dao_treasury/.grafana/provisioning/dashboards/breakdowns/Revenue.json +86 -68
  3. dao_treasury/.grafana/provisioning/dashboards/streams/LlamaPay.json +5 -12
  4. dao_treasury/.grafana/provisioning/dashboards/summary/Monthly.json +5 -6
  5. dao_treasury/.grafana/provisioning/dashboards/transactions/Treasury Transactions.json +220 -195
  6. dao_treasury/.grafana/provisioning/dashboards/transactions/Unsorted Transactions.json +367 -0
  7. dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow (Including Unsorted).json +61 -71
  8. dao_treasury/.grafana/provisioning/dashboards/treasury/Cashflow.json +43 -50
  9. dao_treasury/.grafana/provisioning/dashboards/treasury/Current Treasury Assets.json +50 -90
  10. dao_treasury/.grafana/provisioning/dashboards/treasury/Historical Treasury Balances.json +1403 -532
  11. dao_treasury/.grafana/provisioning/dashboards/treasury/Operating Cashflow.json +31 -36
  12. dao_treasury/.grafana/provisioning/datasources/datasources.yaml +9 -4
  13. dao_treasury/__init__.py +0 -4
  14. dao_treasury/_docker.cp311-win_amd64.pyd +0 -0
  15. dao_treasury/_docker.py +6 -2
  16. dao_treasury/_nicknames.cp311-win_amd64.pyd +0 -0
  17. dao_treasury/_nicknames.py +3 -2
  18. dao_treasury/_wallet.cp311-win_amd64.pyd +0 -0
  19. dao_treasury/constants.cp311-win_amd64.pyd +0 -0
  20. dao_treasury/db.py +353 -120
  21. dao_treasury/docker-compose.yaml +18 -2
  22. dao_treasury/main.py +5 -2
  23. dao_treasury/sorting/__init__.cp311-win_amd64.pyd +0 -0
  24. dao_treasury/sorting/__init__.py +38 -21
  25. dao_treasury/sorting/_matchers.cp311-win_amd64.pyd +0 -0
  26. dao_treasury/sorting/_rules.cp311-win_amd64.pyd +0 -0
  27. dao_treasury/sorting/factory.cp311-win_amd64.pyd +0 -0
  28. dao_treasury/sorting/rule.cp311-win_amd64.pyd +0 -0
  29. dao_treasury/sorting/rules/__init__.cp311-win_amd64.pyd +0 -0
  30. dao_treasury/sorting/rules/ignore/__init__.cp311-win_amd64.pyd +0 -0
  31. dao_treasury/sorting/rules/ignore/llamapay.cp311-win_amd64.pyd +0 -0
  32. dao_treasury/types.cp311-win_amd64.pyd +0 -0
  33. {dao_treasury-0.0.71.dist-info → dao_treasury-0.1.2.dist-info}/METADATA +3 -2
  34. dao_treasury-0.1.2.dist-info/RECORD +53 -0
  35. dao_treasury__mypyc.cp311-win_amd64.pyd +0 -0
  36. dao_treasury/streams/__init__.cp311-win_amd64.pyd +0 -0
  37. dao_treasury/streams/llamapay.cp311-win_amd64.pyd +0 -0
  38. dao_treasury-0.0.71.dist-info/RECORD +0 -54
  39. {dao_treasury-0.0.71.dist-info → dao_treasury-0.1.2.dist-info}/WHEEL +0 -0
  40. {dao_treasury-0.0.71.dist-info → dao_treasury-0.1.2.dist-info}/top_level.txt +0 -0
dao_treasury/db.py CHANGED
@@ -16,27 +16,28 @@ resolving integrity conflicts, caching transaction receipts,
 and creating SQL views for reporting.
 """

+import os
 import typing
-from asyncio import Semaphore
+from asyncio import Lock, Semaphore
 from collections import OrderedDict
+from datetime import date, datetime, time, timezone
 from decimal import Decimal, InvalidOperation
 from functools import lru_cache
 from logging import getLogger
-from os import path
-from pathlib import Path
 from typing import (
     TYPE_CHECKING,
     Any,
+    Callable,
     Coroutine,
     Dict,
     Final,
     Literal,
     Tuple,
+    TypeVar,
     Union,
     final,
     overload,
 )
-from datetime import date, datetime, time, timezone

 import eth_portfolio
 from a_sync import AsyncThreadPoolExecutor
@@ -65,8 +66,10 @@ from pony.orm import (
     composite_key,
     composite_index,
     db_session,
+    rollback,
     select,
 )
+from typing_extensions import ParamSpec
 from y import EEE_ADDRESS, Contract, Network, convert, get_block_timestamp_async
 from y._db.decorators import retry_locked
 from y.contracts import _get_code
@@ -76,14 +79,18 @@ from dao_treasury.constants import CHAINID
 from dao_treasury.types import TxGroupDbid, TxGroupName


-EventItem = _EventItem[_EventItem[OrderedDict[str, Any]]]
-
+_T = TypeVar("_T")
+_P = ParamSpec("_P")

-SQLITE_DIR = Path(path.expanduser("~")) / ".dao-treasury"
-"""Path to the directory in the user's home where the DAO treasury SQLite database is stored."""
+EventItem = _EventItem[_EventItem[OrderedDict[str, Any]]]

-SQLITE_DIR.mkdir(parents=True, exist_ok=True)

+# Postgres connection parameters from environment variables (with docker-compose defaults)
+POSTGRES_USER = os.getenv("DAO_TREASURY_DB_USER", "dao_treasury")
+POSTGRES_PASSWORD = os.getenv("DAO_TREASURY_DB_PASSWORD", "dao_treasury")
+POSTGRES_DB = os.getenv("DAO_TREASURY_DB_NAME", "dao_treasury")
+POSTGRES_HOST = os.getenv("DAO_TREASURY_DB_HOST", "127.0.0.1")
+POSTGRES_PORT = int(os.getenv("DAO_TREASURY_DB_PORT", "8675"))

 _INSERT_THREAD = AsyncThreadPoolExecutor(1)
 _SORT_THREAD = AsyncThreadPoolExecutor(1)
@@ -94,6 +101,12 @@ _UTC = timezone.utc

 db = Database()

+db_ready: bool = False
+startup_lock: Final = Lock()
+
+must_sort_inbound_txgroup_dbid: TxGroupDbid = None
+must_sort_outbound_txgroup_dbid: TxGroupDbid = None
+
 logger = getLogger("dao_treasury.db")


@@ -210,7 +223,7 @@ class Address(DbEntity):
     address = Required(str, index=True)
     """Checksum string of the on-chain address."""

-    nickname = Optional(str)
+    nickname = Optional(str, index=True)
     """Optional human-readable label (e.g., contract name or token name)."""

     is_contract = Required(bool, index=True, lazy=True)
@@ -396,7 +409,7 @@ class Token(DbEntity):
     symbol = Required(str, index=True, lazy=True)
     """Short ticker symbol for the token."""

-    name = Required(str, lazy=True)
+    name = Required(str, lazy=True, index=True)
     """Full human-readable name of the token."""

     decimals = Required(int, lazy=True)
@@ -409,12 +422,15 @@ class Token(DbEntity):
     """Inverse relation for treasury transactions involving this token."""
     # partner_harvest_event = Set('PartnerHarvestEvent', reverse="vault", lazy=True)

-    address = Required(Address, column="address_id")
+    address = Required(Address, column="address_id", unique=True)
     """Foreign key to the address record for this token contract."""

     streams = Set("Stream", reverse="token", lazy=True)
     # vesting_escrows = Set("VestingEscrow", reverse="token", lazy=True)

+    composite_index(chain, name)
+    composite_index(chain, symbol)
+
     def __eq__(self, other: Union["Token", Address, ChecksumAddress]) -> bool:  # type: ignore [override]
         if isinstance(other, str):
             return self.address == other
@@ -557,13 +573,13 @@ class TxGroup(DbEntity):
     txgroup_id = PrimaryKey(int, auto=True)
     """Auto-incremented primary key for transaction groups."""

-    name = Required(str)
+    name = Required(str, index=True)
     """Name of the grouping category, e.g., 'Revenue', 'Expenses'."""

     treasury_tx = Set("TreasuryTx", reverse="txgroup", lazy=True)
     """Inverse relation for treasury transactions assigned to this group."""

-    parent_txgroup = Optional("TxGroup", reverse="child_txgroups")
+    parent_txgroup = Optional("TxGroup", reverse="child_txgroups", index=True)
     """Optional reference to a parent group for nesting."""

     composite_key(name, parent_txgroup)
@@ -647,6 +663,8 @@ class TxGroup(DbEntity):
             if txgroup := TxGroup.get(name=name, parent_txgroup=parent):
                 return txgroup  # type: ignore [no-any-return]
             raise Exception(e, name, parent) from e
+        else:
+            db.execute("REFRESH MATERIALIZED VIEW txgroup_hierarchy;")
         return txgroup  # type: ignore [no-any-return]


@@ -733,6 +751,25 @@ class TreasuryTx(DbEntity):
     """Foreign key to the categorization group."""

     composite_index(chain, txgroup)
+    composite_index(chain, token)
+    composite_index(chain, from_address)
+    composite_index(chain, to_address)
+    composite_index(chain, from_address, to_address)
+    composite_index(timestamp, txgroup)
+    composite_index(timestamp, token)
+    composite_index(timestamp, from_address)
+    composite_index(timestamp, to_address)
+    composite_index(timestamp, from_address, to_address)
+    composite_index(timestamp, chain, txgroup)
+    composite_index(timestamp, chain, token)
+    composite_index(timestamp, chain, from_address)
+    composite_index(timestamp, chain, to_address)
+    composite_index(timestamp, chain, from_address, to_address)
+    composite_index(chain, timestamp, txgroup)
+    composite_index(chain, timestamp, token)
+    composite_index(chain, timestamp, from_address)
+    composite_index(chain, timestamp, to_address)
+    composite_index(chain, timestamp, from_address, to_address)

     @property
     def to_nickname(self) -> typing.Optional[str]:
@@ -964,6 +1001,10 @@ class TreasuryTx(DbEntity):
             must_sort_inbound_txgroup_dbid,
             must_sort_outbound_txgroup_dbid,
         ):
+            with db_session:
+                db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum;")
+                db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_revenue;")
+                db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_expenses;")
             logger.info(
                 "Sorted %s to %s", entry, TxGroup.get_fullname(txgroup_dbid)
             )
@@ -976,25 +1017,42 @@ class TreasuryTx(DbEntity):
         with db_session:
             TreasuryTx[treasury_tx_dbid].txgroup = txgroup_dbid
             commit()
+            db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum;")
+            db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_revenue;")
+            db.execute("REFRESH MATERIALIZED VIEW usdvalue_presum_expenses;")


 _stream_metadata_cache: Final[Dict[HexStr, Tuple[ChecksumAddress, date]]] = {}


+def refresh_matview(name: str) -> Callable[[Callable[_P, _T]], Callable[_P, _T]]:
+    def matview_deco(fn: Callable[_P, _T]) -> Callable[_P, _T]:
+        def matview_refresh_wrap(*args: _P.args, **kwargs: _P.kwargs) -> _T:
+            retval = fn(*args, **kwargs)
+            commit()
+            db.execute(f"REFRESH MATERIALIZED VIEW {name};")
+            commit()
+            return retval
+
+        return matview_refresh_wrap
+
+    return matview_deco
+
+
 class Stream(DbEntity):
     _table_ = "streams"
     stream_id = PrimaryKey(str)

-    contract = Required("Address", reverse="streams")
-    start_block = Required(int)
-    end_block = Optional(int)
+    contract = Required("Address", reverse="streams", index=True)
+    start_block = Required(int, index=True)
+    end_block = Optional(int, index=True)
     token = Required("Token", reverse="streams", index=True)
-    from_address = Required("Address", reverse="streams_from")
-    to_address = Required("Address", reverse="streams_to")
-    reason = Optional(str)
+    from_address = Required("Address", reverse="streams_from", index=True)
+    to_address = Required("Address", reverse="streams_to", index=True)
+    reason = Optional(str, index=True)
     amount_per_second = Required(Decimal, 38, 1)
-    status = Required(str, default="Active")
-    txgroup = Optional("TxGroup", reverse="streams")
+    status = Required(str, default="Active", index=True)
+    txgroup = Optional("TxGroup", reverse="streams", index=True)

     streamed_funds = Set("StreamedFunds", lazy=True)

@@ -1036,10 +1094,12 @@ class Stream(DbEntity):
         end = datetime.fromtimestamp(chain[stream.end_block].timestamp, tz=_UTC)
         return start, end

+    @refresh_matview("stream_ledger")
     def stop_stream(self, block: int) -> None:
         self.end_block = block
         self.status = "Stopped"

+    @refresh_matview("stream_ledger")
     def pause(self) -> None:
         self.status = "Paused"

@@ -1099,6 +1159,7 @@ class StreamedFunds(DbEntity):

     @classmethod
     @db_session
+    @refresh_matview("stream_ledger")
     def create_entity(
         cls,
         stream_id: str,
@@ -1123,23 +1184,54 @@ class StreamedFunds(DbEntity):
         return entity


-db.bind(
-    provider="sqlite",  # TODO: let user choose postgres with server connection params
-    filename=str(SQLITE_DIR / "dao-treasury.sqlite"),
-    create_db=True,
-)
+def init_db() -> None:
+    """Initialize the database if not yet initialized."""
+    global db_ready
+    if db_ready:
+        return

-db.generate_mapping(create_tables=True)
+    db.bind(
+        provider="postgres",
+        user=POSTGRES_USER,
+        password=POSTGRES_PASSWORD,
+        host=POSTGRES_HOST,
+        port=POSTGRES_PORT,
+        database=POSTGRES_DB,
+    )
+
+    db.generate_mapping(create_tables=True)
+
+    with db_session:
+        create_stream_ledger_matview()
+        create_txgroup_hierarchy_matview()
+        # create_vesting_ledger_view()
+        create_general_ledger_view()
+        create_unsorted_txs_view()
+        create_usdval_presum_matview()
+        create_usdval_presum_revenue_matview()
+        create_usdval_presum_expenses_matview()
+        # create_monthly_pnl_view()
+
+    global must_sort_inbound_txgroup_dbid
+    must_sort_inbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Inbound)")
+
+    global must_sort_outbound_txgroup_dbid
+    must_sort_outbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Outbound)")
+
+    _drop_shitcoin_txs()

+    db_ready = True

-def _set_address_nicknames_for_tokens() -> None:
+
+def set_address_nicknames_for_tokens() -> None:
     """Set address.nickname for addresses belonging to tokens."""
+    init_db()
     for address in select(a for a in Address if a.token and not a.nickname):
         address.nickname = f"Token: {address.token.name}"
     db.commit()


-def create_stream_ledger_view() -> None:
+def create_stream_ledger_matview() -> None:
     """Create or replace the SQL view `stream_ledger` for streamed funds reporting.

     This view joins streamed funds, streams, tokens, addresses, and txgroups
@@ -1148,60 +1240,90 @@ def create_stream_ledger_view() -> None:
     Examples:
         >>> create_stream_ledger_view()
     """
-    db.execute("""DROP VIEW IF EXISTS stream_ledger;""")
-    db.execute(
-        """
-        create view stream_ledger as
-        SELECT 'Mainnet' as chain_name,
-            cast(strftime('%s', date || ' 00:00:00') as INTEGER) as timestamp,
-            NULL as block,
-            NULL as hash,
-            NULL as log_index,
-            symbol as token,
-            d.address AS "from",
-            d.nickname as from_nickname,
-            e.address AS "to",
-            e.nickname as to_nickname,
-            amount,
-            price,
-            value_usd,
-            txgroup.name as txgroup,
-            parent.name as parent_txgroup,
+    try:
+        db.execute(
+            """
+            DROP MATERIALIZED VIEW IF EXISTS stream_ledger CASCADE;
+            CREATE MATERIALIZED VIEW stream_ledger AS
+            SELECT
+                'Mainnet' as chain_name,
+                EXTRACT(EPOCH FROM (date::date))::integer as timestamp,
+                CAST(NULL as integer) as block,
+                NULL as hash,
+                CAST(NULL as integer) as log_index,
+                symbol as token,
+                d.address AS "from",
+                d.nickname as from_nickname,
+                e.address AS "to",
+                e.nickname as to_nickname,
+                amount,
+                price,
+                value_usd,
+                txgroup.name as txgroup,
+                parent.name as parent_txgroup,
                 txgroup.txgroup_id
-            FROM streamed_funds a
-            LEFT JOIN streams b ON a.stream = b.stream_id
-            LEFT JOIN tokens c ON b.token = c.token_id
-            LEFT JOIN addresses d ON b.from_address = d.address_id
-            LEFT JOIN addresses e ON b.to_address = e.address_id
-            LEFT JOIN txgroups txgroup ON b.txgroup = txgroup.txgroup_id
-            LEFT JOIN txgroups parent ON txgroup.parent_txgroup = parent.txgroup_id
-        """
-    )
+            FROM streamed_funds a
+            LEFT JOIN streams b ON a.stream = b.stream_id
+            LEFT JOIN tokens c ON b.token = c.token_id
+            LEFT JOIN addresses d ON b.from_address = d.address_id
+            LEFT JOIN addresses e ON b.to_address = e.address_id
+            LEFT JOIN txgroups txgroup ON b.txgroup = txgroup.txgroup_id
+            LEFT JOIN txgroups parent ON txgroup.parent_txgroup = parent.txgroup_id;
+
+            """
+        )
+    except Exception as e:
+        if '"stream_ledger" is not a materialized view' not in str(e):
+            raise
+        # we're running an old schema, lets migrate it
+        rollback()
+        db.execute("DROP VIEW IF EXISTS stream_ledger CASCADE;")
+        commit()
+        create_stream_ledger_matview()


-def create_txgroup_hierarchy_view() -> None:
+def create_txgroup_hierarchy_matview() -> None:
     """Create or replace the SQL view `txgroup_hierarchy` for recursive txgroup hierarchy.

     This view exposes txgroup_id, top_category, and parent_txgroup for all txgroups,
     matching the recursive CTE logic used in dashboards.
     """
-    db.execute("DROP VIEW IF EXISTS txgroup_hierarchy;")
-    db.execute(
-        """
-        CREATE VIEW txgroup_hierarchy AS
-        WITH RECURSIVE group_hierarchy (txgroup_id, top_category, parent_txgroup) AS (
-            SELECT txgroup_id, name AS top_category, parent_txgroup
-            FROM txgroups
-            WHERE parent_txgroup IS NULL
-            UNION ALL
-            SELECT child.txgroup_id, parent.top_category, child.parent_txgroup
-            FROM txgroups AS child
-            JOIN group_hierarchy AS parent
-            ON child.parent_txgroup = parent.txgroup_id
+    try:
+        db.execute(
+            """
+            DROP MATERIALIZED VIEW IF EXISTS txgroup_hierarchy CASCADE;
+            CREATE MATERIALIZED VIEW txgroup_hierarchy AS
+            WITH RECURSIVE group_hierarchy (txgroup_id, top_category, parent_txgroup) AS (
+                SELECT txgroup_id, name AS top_category, parent_txgroup
+                FROM txgroups
+                WHERE parent_txgroup IS NULL
+                UNION ALL
+                SELECT child.txgroup_id, parent.top_category, child.parent_txgroup
+                FROM txgroups AS child
+                JOIN group_hierarchy AS parent
+                ON child.parent_txgroup = parent.txgroup_id
+            )
+            SELECT * FROM group_hierarchy;
+
+            -- Indexes
+            CREATE UNIQUE INDEX idx_txgroup_hierarchy_txgroup_id
+            ON txgroup_hierarchy (txgroup_id);
+
+            CREATE INDEX idx_txgroup_hierarchy_top_category
+            ON txgroup_hierarchy (top_category);
+
+            CREATE INDEX idx_txgroup_hierarchy_parent
+            ON txgroup_hierarchy (parent_txgroup);
+            """
         )
-        SELECT * FROM group_hierarchy;
-        """
-    )
+    except Exception as e:
+        if '"txgroup_hierarchy" is not a materialized view' not in str(e):
+            raise
+        # we're running an old schema, lets migrate it
+        rollback()
+        db.execute("DROP VIEW IF EXISTS txgroup_hierarchy CASCADE;")
+        commit()
+        create_txgroup_hierarchy_matview()


 def create_vesting_ledger_view() -> None:
@@ -1217,11 +1339,12 @@ def create_vesting_ledger_view() -> None:
         """
         DROP VIEW IF EXISTS vesting_ledger;
         CREATE VIEW vesting_ledger AS
-        SELECT d.chain_name,
-            CAST(date AS timestamp) AS "timestamp",
-            cast(NULL as int) AS block,
+        SELECT
+            d.chain_name,
+            date::timestamp AS "timestamp",
+            CAST(NULL as integer) AS block,
             NULL AS "hash",
-            cast(NULL as int) AS "log_index",
+            CAST(NULL as integer) AS "log_index",
             c.symbol AS "token",
             e.address AS "from",
             e.nickname as from_nickname,
@@ -1233,14 +1356,14 @@ def create_vesting_ledger_view() -> None:
             g.name as txgroup,
             h.name AS parent_txgroup,
             g.txgroup_id
-        FROM vested_funds a
+        FROM vested_funds a
         LEFT JOIN vesting_escrows b ON a.escrow = b.escrow_id
         LEFT JOIN tokens c ON b.token = c.token_id
         LEFT JOIN chains d ON c.chain = d.chain_dbid
         LEFT JOIN addresses e ON b.address = e.address_id
         LEFT JOIN addresses f ON b.recipient = f.address_id
         LEFT JOIN txgroups g ON b.txgroup = g.txgroup_id
-        left JOIN txgroups h ON g.parent_txgroup = h.txgroup_id
+        LEFT JOIN txgroups h ON g.parent_txgroup = h.txgroup_id;
         """
     )

@@ -1253,13 +1376,17 @@ def create_general_ledger_view() -> None:
     Examples:
         >>> create_general_ledger_view()
     """
-    db.execute("drop VIEW IF EXISTS general_ledger")
     db.execute(
         """
-        create VIEW general_ledger as
-        select *
-        from (
-        SELECT treasury_tx_id, b.chain_name, a.timestamp, a.block, a.hash, a.log_index, c.symbol AS token, d.address AS "from", d.nickname as from_nickname, e.address AS "to", e.nickname as to_nickname, a.amount, a.price, a.value_usd, f.name AS txgroup, g.name AS parent_txgroup, f.txgroup_id
+        DROP VIEW IF EXISTS general_ledger;
+        CREATE VIEW general_ledger AS
+        SELECT *
+        FROM (
+            SELECT
+                treasury_tx_id, b.chain_name, a.timestamp, a.block, a.hash, a.log_index,
+                c.symbol AS token, d.address AS "from", d.nickname as from_nickname,
+                e.address AS "to", e.nickname as to_nickname, a.amount, a.price, a.value_usd,
+                f.name AS txgroup, g.name AS parent_txgroup, f.txgroup_id
             FROM treasury_txs a
            LEFT JOIN chains b ON a.chain = b.chain_dbid
            LEFT JOIN tokens c ON a.token_id = c.token_id
@@ -1268,13 +1395,15 @@ def create_general_ledger_view() -> None:
            LEFT JOIN txgroups f ON a.txgroup_id = f.txgroup_id
            LEFT JOIN txgroups g ON f.parent_txgroup = g.txgroup_id
            UNION
-            SELECT -1, chain_name, timestamp, block, hash, log_index, token, "from", from_nickname, "to", to_nickname, amount, price, value_usd, txgroup, parent_txgroup, txgroup_id
+            SELECT
+                -1, chain_name, timestamp, block, hash, log_index, token, "from", from_nickname,
+                "to", to_nickname, amount, price, value_usd, txgroup, parent_txgroup, txgroup_id
            FROM stream_ledger
            --UNION
            --SELECT -1, *
            --FROM vesting_ledger
        ) a
-        ORDER BY timestamp
+        ORDER BY timestamp;
         """
     )

@@ -1287,14 +1416,14 @@ def create_unsorted_txs_view() -> None:
     Examples:
         >>> create_unsorted_txs_view()
     """
-    db.execute("DROP VIEW IF EXISTS unsorted_txs;")
     db.execute(
         """
-        CREATE VIEW unsorted_txs as
+        DROP VIEW IF EXISTS unsorted_txs;
+        CREATE VIEW unsorted_txs AS
         SELECT *
         FROM general_ledger
         WHERE txgroup = 'Categorization Pending'
-        ORDER BY TIMESTAMP desc
+        ORDER BY timestamp DESC;
         """
     )

@@ -1307,53 +1436,160 @@ def create_monthly_pnl_view() -> None:
     Examples:
         >>> create_monthly_pnl_view()
     """
-    db.execute("DROP VIEW IF EXISTS monthly_pnl;")
     sql = """
+    DROP VIEW IF EXISTS monthly_pnl;
     CREATE VIEW monthly_pnl AS
     WITH categorized AS (
-        SELECT
-            strftime('%Y-%m', datetime(t.timestamp, 'unixepoch')) AS month,
+        SELECT
+            to_char(to_timestamp(t.timestamp), 'YYYY-MM') AS month,
             CASE
-                WHEN p.name IS NOT NULL THEN p.name
-                ELSE tg.name
+                WHEN p.name IS NOT NULL THEN p.name
+                ELSE tg.name
             END AS top_category,
             --COALESCE(t.value_usd, 0) AS value_usd,
             --COALESCE(t.gas_used, 0) * COALESCE(t.gas_price, 0) AS gas_cost
-        FROM treasury_txs t
-        JOIN txgroups tg ON t.txgroup = tg.txgroup_id
-        LEFT JOIN txgroups p ON tg.parent_txgroup = p.txgroup_id
-        WHERE tg.name <> 'Ignore'
+        FROM treasury_txs t
+        JOIN txgroups tg ON t.txgroup = tg.txgroup_id
+        LEFT JOIN txgroups p ON tg.parent_txgroup = p.txgroup_id
+        WHERE tg.name <> 'Ignore'
     )
     SELECT
-        month,
-        SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END) AS revenue,
-        SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END) AS cost_of_revenue,
-        SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END) AS expenses,
-        SUM(CASE WHEN top_category = 'Other Income' THEN value_usd ELSE 0 END) AS other_income,
-        SUM(CASE WHEN top_category = 'Other Expenses' THEN value_usd ELSE 0 END) AS other_expense,
-        (
+        month,
+        SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END) AS revenue,
+        SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END) AS cost_of_revenue,
+        SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END) AS expenses,
+        SUM(CASE WHEN top_category = 'Other Income' THEN value_usd ELSE 0 END) AS other_income,
+        SUM(CASE WHEN top_category = 'Other Expenses' THEN value_usd ELSE 0 END) AS other_expense,
+        (
            SUM(CASE WHEN top_category = 'Revenue' THEN value_usd ELSE 0 END) -
            SUM(CASE WHEN top_category = 'Cost of Revenue' THEN value_usd ELSE 0 END) -
            SUM(CASE WHEN top_category = 'Expenses' THEN value_usd ELSE 0 END) +
            SUM(CASE WHEN top_category = 'Other Income' THEN value_usd ELSE 0 END) -
            SUM(CASE WHEN top_category = 'Other Expenses' THEN value_usd ELSE 0 END)
-        ) AS net_profit
+        ) AS net_profit
     FROM categorized
     GROUP BY month;
     """
     db.execute(sql)


-with db_session:
-    create_stream_ledger_view()
-    create_txgroup_hierarchy_view()
-    # create_vesting_ledger_view()
-    create_general_ledger_view()
-    create_unsorted_txs_view()
-    # create_monthly_pnl_view()
+def create_usdval_presum_matview() -> None:
+    # This view presums usd value from the general_ledger view,
+    # grouped by timestamp and txgroup
+    db.execute(
+        """
+        DROP MATERIALIZED VIEW IF EXISTS usdvalue_presum;
+        CREATE MATERIALIZED VIEW usdvalue_presum AS
+        SELECT
+            gl.txgroup_id,
+            gh.top_category,
+            gl.timestamp,
+            SUM(value_usd) AS value_usd
+        FROM general_ledger gl
+        JOIN txgroup_hierarchy gh USING (txgroup_id)
+        GROUP BY gl.txgroup_id, gh.top_category, gl.timestamp;
+
+        -- Indexes
+        CREATE UNIQUE INDEX idx_usdvalue_presum_txgroup_id_timestamp
+        ON usdvalue_presum (txgroup_id, timestamp);
+
+        CREATE UNIQUE INDEX idx_usdvalue_presum_timestamp_txgroup_id
+        ON usdvalue_presum (timestamp, txgroup_id);
+
+        CREATE INDEX idx_usdvalue_presum_top_category_timestamp
+        ON usdvalue_presum (top_category, timestamp);
+
+        CREATE INDEX idx_usdvalue_presum_timestamp_top_category
+        ON usdvalue_presum (timestamp, top_category);
+
+        CREATE UNIQUE INDEX idx_usdvalue_presum_top_category_txgroup_id_timestamp
+        ON usdvalue_presum (top_category, txgroup_id, timestamp);
+
+        CREATE UNIQUE INDEX idx_usdvalue_presum_timestamp_top_category_txgroup_id
+        ON usdvalue_presum (timestamp, top_category, txgroup_id);
+        """
+    )

-must_sort_inbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Inbound)")
-must_sort_outbound_txgroup_dbid = TxGroup.get_dbid(name="Sort Me (Outbound)")
+
+def create_usdval_presum_revenue_matview() -> None:
+    # This view is specifically for the Revenue Over Time dashboard.
+    # It presums usd value for Revenue and Other Income categories only, pre-joining txgroups and top_category.
+    db.execute(
+        """
+        DROP MATERIALIZED VIEW IF EXISTS usdvalue_presum_revenue;
+        CREATE MATERIALIZED VIEW usdvalue_presum_revenue AS
+        SELECT
+            p.txgroup_id,
+            t.name AS txgroup_name,
+            p.top_category,
+            p.timestamp,
+            SUM(p.value_usd) AS value_usd
+        FROM usdvalue_presum p
+        JOIN txgroups t ON p.txgroup_id = t.txgroup_id
+        WHERE p.top_category IN ('Revenue', 'Other Income')
+        GROUP BY p.txgroup_id, t.name, p.top_category, p.timestamp;
+
+        -- Indexes
+        CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_txgroup_id_timestamp
+        ON usdvalue_presum_revenue (txgroup_id, timestamp);
+
+        CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_timestamp_txgroup_id
+        ON usdvalue_presum_revenue (timestamp, txgroup_id);
+
+        CREATE INDEX idx_usdvalue_presum_revenue_txgroup_name_timestamp
+        ON usdvalue_presum_revenue (txgroup_name, timestamp);
+
+        CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_timestamp_txgroup_name
+        ON usdvalue_presum_revenue (timestamp, txgroup_name);
+
+        CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_top_category_txgroup_id_timestamp
+        ON usdvalue_presum_revenue (top_category, txgroup_id, timestamp);
+
+        CREATE UNIQUE INDEX idx_usdvalue_presum_revenue_top_category_txgroup_name_timestamp
+        ON usdvalue_presum_revenue (top_category, txgroup_name, timestamp);
+        """
+    )
+
+
+def create_usdval_presum_expenses_matview() -> None:
+    # This view is specifically for the Expenses Over Time dashboard.
+    # It presums usd value for Expenses, Cost of Revenue, and Other Expense categories only, pre-joining txgroups and top_category
+    db.execute(
+        """
+        DROP MATERIALIZED VIEW IF EXISTS usdvalue_presum_expenses;
+        CREATE MATERIALIZED VIEW usdvalue_presum_expenses AS
+        SELECT
+            p.txgroup_id,
+            g.name AS txgroup_name,
+            p.top_category,
+            p.timestamp,
+            SUM(p.value_usd) AS value_usd
+        FROM usdvalue_presum p
+        JOIN txgroup_hierarchy gh ON p.txgroup_id = gh.txgroup_id
+        JOIN txgroups g ON p.txgroup_id = g.txgroup_id
+        WHERE p.top_category IN ('Expenses', 'Cost of Revenue', 'Other Expense')
+        GROUP BY p.txgroup_id, g.name, p.top_category, p.timestamp;
+
+        -- Indexes
+        CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_txgroup_id_timestamp
+        ON usdvalue_presum_expenses (txgroup_id, timestamp);
+
+        CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_timestamp_txgroup_id
+        ON usdvalue_presum_expenses (timestamp, txgroup_id);
+
+        CREATE INDEX idx_usdvalue_presum_expenses_txgroup_name_timestamp
+        ON usdvalue_presum_expenses (txgroup_name, timestamp);
+
+        CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_timestamp_txgroup_name
+        ON usdvalue_presum_expenses (timestamp, txgroup_name);
+
+        CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_top_category_txgroup_id_timestamp
+        ON usdvalue_presum_expenses (top_category, txgroup_id, timestamp);
+
+        CREATE UNIQUE INDEX idx_usdvalue_presum_expenses_top_category_txgroup_name_timestamp
+        ON usdvalue_presum_expenses (top_category, txgroup_name, timestamp);
+        """
+    )


 @db_session
@@ -1449,6 +1685,3 @@ def _drop_shitcoin_txs() -> None:
     for tx in shitcoin_txs:
         tx.delete()
     logger.info("Shitcoin tx purge complete.")
-
-
-_drop_shitcoin_txs()
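
For orientation, the db.py changes above replace the import-time SQLite binding with an explicit init_db() that connects to Postgres using the DAO_TREASURY_DB_* environment variables and then builds the reporting views and materialized views. The following is a minimal usage sketch, not taken from the package documentation; the values shown are simply the defaults visible in the diff, and because the module reads the variables at import time, any overrides must be set before dao_treasury.db is imported.

import os

# Override the Postgres connection settings before importing dao_treasury.db
# (the module reads DAO_TREASURY_DB_* at import time; these are the diff's defaults).
os.environ.setdefault("DAO_TREASURY_DB_HOST", "127.0.0.1")
os.environ.setdefault("DAO_TREASURY_DB_PORT", "8675")
os.environ.setdefault("DAO_TREASURY_DB_USER", "dao_treasury")
os.environ.setdefault("DAO_TREASURY_DB_PASSWORD", "dao_treasury")
os.environ.setdefault("DAO_TREASURY_DB_NAME", "dao_treasury")

from dao_treasury import db

# Binds Pony ORM to Postgres, creates tables, and (re)builds the views and materialized views.
db.init_db()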