dao-scripts 1.2.2-py3-none-any.whl → 1.3.0.post1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. dao_analyzer/cache_scripts/_version.py +2 -2
  2. dao_analyzer/cache_scripts/aragon/runner.py +23 -26
  3. dao_analyzer/cache_scripts/argparser.py +14 -19
  4. dao_analyzer/cache_scripts/common/__init__.py +3 -1
  5. dao_analyzer/cache_scripts/common/api_requester.py +14 -13
  6. dao_analyzer/cache_scripts/common/blockscout.py +11 -13
  7. dao_analyzer/cache_scripts/common/common.py +55 -28
  8. dao_analyzer/cache_scripts/common/cryptocompare.py +4 -4
  9. dao_analyzer/cache_scripts/common/thegraph.py +203 -0
  10. dao_analyzer/cache_scripts/config.py +57 -15
  11. dao_analyzer/cache_scripts/daohaus/runner.py +20 -20
  12. dao_analyzer/cache_scripts/daostack/runner.py +25 -28
  13. dao_analyzer/cache_scripts/endpoints.json +14 -18
  14. dao_analyzer/cache_scripts/logging.py +98 -0
  15. dao_analyzer/cache_scripts/main.py +83 -77
  16. dao_analyzer/cache_scripts/metadata.py +6 -6
  17. dao_scripts-1.3.0.post1-py3.12-nspkg.pth +1 -0
  18. dao_scripts-1.3.0.post1.dist-info/LICENSE +674 -0
  19. {dao_scripts-1.2.2.dist-info → dao_scripts-1.3.0.post1.dist-info}/METADATA +42 -8
  20. dao_scripts-1.3.0.post1.dist-info/RECORD +32 -0
  21. {dao_scripts-1.2.2.dist-info → dao_scripts-1.3.0.post1.dist-info}/WHEEL +1 -1
  22. dao_analyzer/cache_scripts/common/graphql.py +0 -143
  23. dao_scripts-1.2.2-py3.11-nspkg.pth +0 -1
  24. dao_scripts-1.2.2.dist-info/RECORD +0 -30
  25. {dao_scripts-1.2.2.dist-info → dao_scripts-1.3.0.post1.dist-info}/entry_points.txt +0 -0
  26. {dao_scripts-1.2.2.dist-info → dao_scripts-1.3.0.post1.dist-info}/namespace_packages.txt +0 -0
  27. {dao_scripts-1.2.2.dist-info → dao_scripts-1.3.0.post1.dist-info}/top_level.txt +0 -0
dao_analyzer/cache_scripts/logging.py (new file)
@@ -0,0 +1,98 @@
+import sys
+import logging
+from pathlib import Path
+import datetime as dt
+
+from . import config
+
+LOG_FILE_FORMAT = "[%(levelname)s] - %(asctime)s - %(name)s - : %(message)s in %(pathname)s:%(lineno)d"
+LOG_STREAM_FORMAT = "%(levelname)s: %(message)s"
+
+class AuxDatawarehouseHandler(logging.Handler):
+    # Inspired by MemoryHandler
+
+    def __init__(self, aux_dw: Path, real_dw: Path, name: str, level=logging.NOTSET):
+        super().__init__(level=level)
+        self._aux_dw = aux_dw
+        self._real_dw = real_dw
+        self._fname = name
+
+        self._auxHandler = logging.FileHandler(self._aux_dw / 'logs' / f'{self._fname}.log')
+        self._buffer: list[logging.LogRecord] = []
+
+        self.set_name(f'aux_dw_{name}')
+
+    def setFormatter(self, fmt):
+        self._auxHandler.setFormatter(fmt)
+        return super().setFormatter(fmt)
+
+    def setLevel(self, level):
+        super().setLevel(level)
+        self._auxHandler.setLevel(level)
+
+    def emit(self, record):
+        self._auxHandler.emit(record)
+        self._buffer.append(record)
+
+    def dump(self, errors: bool):
+        if errors:
+            target: logging.Handler = logging.handlers.RotatingFileHandler(
+                self._real_dw / 'logs' / f'{self._fname}_error_{dt.datetime.now().isoformat()}.log',
+            )
+        else:
+            target: logging.Handler = logging.handlers.RotatingFileHandler(
+                self._real_dw / 'logs' / f'{self._fname}.log',
+                maxBytes=int(config.LOGGING_MAX_SIZE),
+                backupCount=int(config.LOGGING_BACKUP_COUNT),
+            )
+
+        target.setLevel(self.level)
+        target.setFormatter(self.formatter)
+
+        self.acquire()
+        try:
+            for record in self._buffer:
+                target.handle(record)
+            self._buffer.clear()
+        finally:
+            self.release()
+
+_all_dw_handlers: list[AuxDatawarehouseHandler] = []
+def _setup_handler_in_logger(logger: str | logging.Logger, aux_dw, real_dw, name):
+    _all_dw_handlers.append(h := AuxDatawarehouseHandler(aux_dw, real_dw, name))
+    h.setFormatter(logging.Formatter(LOG_FILE_FORMAT))
+
+    if isinstance(logger, logging.Logger):
+        logger.addHandler(h)
+    else:
+        logging.getLogger(logger).addHandler(h)
+
+    return h
+
+def setup_logging(aux_dw: Path, real_dw: Path, debug: bool):
+    (aux_dw / 'logs').mkdir(exist_ok=True)
+    (real_dw / 'logs').mkdir(exist_ok=True)
+
+    logger = logging.getLogger('dao_analyzer')
+    logger.propagate = True
+
+    gqlLogger = logging.getLogger('gql.transport.requests')
+
+    _setup_handler_in_logger(logger, aux_dw, real_dw, 'cache_scripts')
+    _setup_handler_in_logger(gqlLogger, aux_dw, real_dw, 'gql_requests')
+
+    streamhandler = logging.StreamHandler(sys.stderr)
+    streamhandler.setLevel(logging.WARNING if debug else logging.ERROR)
+    streamhandler.setFormatter(logging.Formatter(LOG_STREAM_FORMAT))
+
+    logger.addHandler(streamhandler)
+    gqlLogger.addHandler(streamhandler)
+
+    if debug:
+        logger.setLevel(logging.DEBUG)
+        gqlLogger.setLevel(logging.DEBUG)
+
+def finish_logging(errors: bool):
+    for h in _all_dw_handlers:
+        h.dump(errors)
+        h.close()
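
The new logging module buffers every record in memory (and mirrors it to a log file inside the temporary datawarehouse) until `finish_logging` decides where to replay it: a timestamped `*_error_*.log` if the run failed, or the rotating main log otherwise. Note that it references `logging.handlers.RotatingFileHandler` without importing `logging.handlers` itself; this only works because `main.py` adds `import logging.handlers` (see the next diff). A minimal usage sketch, assuming the wheel is installed; the directory names here are hypothetical, the real entry point is `main.py`:

# Usage sketch for the new logging module (hypothetical paths)
import logging
from pathlib import Path

from dao_analyzer.cache_scripts.logging import setup_logging, finish_logging

aux_dw = Path('/tmp/aux_dw')     # scratch copy the collectors write into
real_dw = Path('datawarehouse')  # the published datawarehouse
aux_dw.mkdir(parents=True, exist_ok=True)
real_dw.mkdir(parents=True, exist_ok=True)

setup_logging(aux_dw, real_dw, debug=True)
log = logging.getLogger('dao_analyzer.example')  # child of 'dao_analyzer'

ok = False
try:
    log.info("collecting...")
    ok = True
finally:
    # errors=True replays the buffered records into a timestamped
    # *_error_*.log under real_dw/logs/; errors=False appends them
    # to the rotating main log instead.
    finish_logging(errors=not ok)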
dao_analyzer/cache_scripts/main.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python3
-from typing import Dict
-
 from datetime import datetime
+import logging.handlers
 from pathlib import Path
 import portalocker as pl
 import os
@@ -11,19 +10,19 @@ import sys
 from sys import stderr
 
 import logging
-from logging.handlers import RotatingFileHandler
+
+from argparse import Namespace
 
 from .aragon.runner import AragonRunner
 from .daohaus.runner import DaohausRunner
 from .daostack.runner import DaostackRunner
-from .common import Runner, ENDPOINTS
+from .common import ENDPOINTS, NetworkRunner
 from .argparser import CacheScriptsArgParser
+from ._version import __version__
+from .logging import setup_logging, finish_logging
 from . import config
 
-LOG_FILE_FORMAT = "[%(levelname)s] - %(asctime)s - %(name)s - : %(message)s in %(pathname)s:%(lineno)d"
-LOG_STREAM_FORMAT = "%(levelname)s: %(message)s"
-
-AVAILABLE_PLATFORMS: Dict[str, Runner] = {
+AVAILABLE_PLATFORMS: dict[str, type[NetworkRunner]] = {
     AragonRunner.name: AragonRunner,
     DaohausRunner.name: DaohausRunner,
     DaostackRunner.name: DaostackRunner
@@ -32,10 +31,9 @@ AVAILABLE_PLATFORMS: Dict[str, Runner] = {
 # Get available networks from Runners
 AVAILABLE_NETWORKS = {n for n in ENDPOINTS.keys() if not n.startswith('_')}
 
-def _call_platform(platform: str, datawarehouse: Path, force: bool=False, networks=None, collectors=None):
-    p = AVAILABLE_PLATFORMS[platform]()
-    p.set_dw(datawarehouse)
-    p.run(networks=networks, force=force, collectors=collectors)
+def _call_platform(platform: str, datawarehouse: Path, force: bool=False, networks=None, collectors=None, block_datetime=None):
+    p = AVAILABLE_PLATFORMS[platform](datawarehouse)
+    p.run(networks=networks, force=force, collectors=collectors, until_date=block_datetime)
 
 def _is_good_version(datawarehouse: Path) -> bool:
     versionfile = datawarehouse / 'version.txt'
@@ -43,64 +41,36 @@ def _is_good_version(datawarehouse: Path) -> bool:
         return False
 
     with open(versionfile, 'r') as vf:
-        l = vf.readline().strip()
-        return l == config.CACHE_SCRIPTS_VERSION
-
-def main_aux(datawarehouse: Path):
-    if config.delete_force or not _is_good_version(datawarehouse):
-        if not config.delete_force:
-            print(f"datawarehouse version is not version {config.CACHE_SCRIPTS_VERSION}, upgrading")
-
-        # We skip the dotfiles like .lock
-        for p in datawarehouse.glob('[!.]*'):
-            if p.is_dir():
-                shutil.rmtree(p)
-            else:
-                p.unlink()
-
-    logger = logging.getLogger()
-    logger.propagate = True
-    filehandler = RotatingFileHandler(
-        filename=config.datawarehouse / 'cache_scripts.log',
-        maxBytes=config.LOGGING_MAX_MB * 2**20,
-        backupCount=config.LOGGING_BACKUP_COUNT,
-    )
-
-    filehandler.setFormatter(logging.Formatter(LOG_FILE_FORMAT))
-    logger.addHandler(filehandler)
-    logger.setLevel(level=logging.DEBUG if config.debug else logging.INFO)
-
-    logging.getLogger('gql.transport.requests').setLevel(level=logging.DEBUG if config.debug else logging.WARNING)
-
-    # Log errors to STDERR
-    streamhandler = logging.StreamHandler(stderr)
-    streamhandler.setLevel(logging.WARNING if config.debug else logging.ERROR)
-    streamhandler.setFormatter(logging.Formatter(LOG_STREAM_FORMAT))
-    logger.addHandler(streamhandler)
-
-    logging.info("Running dao-scripts with arguments: %s", sys.orig_argv)
+        return vf.readline().strip() == __version__
+
+def run_all(
+    datawarehouse: Path, delete_force: bool,
+    platforms: list[str], networks: list[str], collectors: list[str],
+    block_datetime: datetime, force: bool
+):
 
     # The default config is every platform
-    if not config.platforms:
-        config.platforms = AVAILABLE_PLATFORMS.keys()
+    if not platforms:
+        platforms = list(AVAILABLE_PLATFORMS.keys())
 
     # Now calling the platform and deleting if needed
-    for p in config.platforms:
-        _call_platform(p, datawarehouse, config.force, config.networks, config.collectors)
+    for platform in platforms:
+        _call_platform(platform, datawarehouse, force, networks, collectors, block_datetime)
 
     # write date
     data_date: str = str(datetime.now().isoformat())
 
-    if config.block_datetime:
-        data_date = config.block_datetime.isoformat()
+    if block_datetime:
+        data_date = block_datetime.isoformat()
 
     with open(datawarehouse / 'update_date.txt', 'w') as f:
         print(data_date, file=f)
 
     with open(datawarehouse / 'version.txt', 'w') as f:
-        print(config.CACHE_SCRIPTS_VERSION, file=f)
+        print(__version__, file=f)
 
-def main_lock(datawarehouse: Path):
+def lock_and_run(args: Namespace):
+    datawarehouse: Path = args.datawarehouse
     datawarehouse.mkdir(exist_ok=True)
 
     # Lock for the datawarehouse (also used by the dash)
@@ -111,28 +81,63 @@ def main_lock(datawarehouse: Path):
 
     try:
         with pl.Lock(cs_lock, 'w', timeout=1) as lock, \
-            tempfile.TemporaryDirectory(prefix="datawarehouse_") as tmp_dw:
+            tempfile.TemporaryDirectory(prefix="datawarehouse_") as tmp_dw_str:
 
             # Writing pid and dir name to lock (debugging)
-            tmp_dw = Path(tmp_dw)
+            tmp_dw = Path(tmp_dw_str)
             print(os.getpid(), file=lock)
             print(tmp_dw, file=lock)
             lock.flush()
-
-            ignore = shutil.ignore_patterns('*.log', '.lock*')
-
-            # We want to copy the dw, so we open it as readers
-            p_lock.touch(exist_ok=True)
-            with pl.Lock(p_lock, 'r', timeout=1, flags=pl.LOCK_SH | pl.LOCK_NB):
-                shutil.copytree(datawarehouse, tmp_dw, dirs_exist_ok=True, ignore=ignore)
-
-            main_aux(datawarehouse=tmp_dw)
-
-            with pl.Lock(p_lock, 'w', timeout=10):
-                shutil.copytree(tmp_dw, datawarehouse, dirs_exist_ok=True, ignore=ignore)
-
-            # Removing pid from lock
-            lock.truncate(0)
+            (datawarehouse / '.running').symlink_to(tmp_dw)
+
+            # Used to tell the loggers to use errors.log or the main logs
+            copied_dw = False
+
+            try:
+                ignore = shutil.ignore_patterns('.lock*', 'logs/*')
+
+                # We want to copy the dw, so we open it as readers
+                p_lock.touch(exist_ok=True)
+                with pl.Lock(p_lock, 'r', timeout=1, flags=pl.LOCK_SH | pl.LOCK_NB):
+                    shutil.copytree(datawarehouse, tmp_dw, dirs_exist_ok=True, ignore=ignore)
+
+                if args.delete_force or not _is_good_version(datawarehouse):
+                    if not args.delete_force:
+                        print(f"datawarehouse version is not version {__version__}, upgrading")
+
+                    # We skip the dotfiles like .lock
+                    for p in datawarehouse.glob('[!.]*'):
+                        if p.is_dir():
+                            shutil.rmtree(p)
+                        else:
+                            p.unlink()
+
+                setup_logging(tmp_dw, datawarehouse, config.DEBUG)
+                logger = logging.getLogger('dao_analyzer.main')
+                logger.info(">>> Running dao-scripts with arguments: %s", sys.orig_argv)
+
+                # Execute the scripts in the aux datawarehouse
+                run_all(
+                    datawarehouse=tmp_dw,
+                    delete_force=args.delete_force,
+                    platforms=args.platforms,
+                    networks=args.networks,
+                    collectors=args.collectors,
+                    block_datetime=args.block_datetime,
+                    force=args.force,
+                )
+
+                # Copying back the dw
+                logger.info(f"<<< Copying back the datawarehouse from {tmp_dw} to {datawarehouse}")
+                with pl.Lock(p_lock, 'w', timeout=10):
+                    shutil.copytree(tmp_dw, datawarehouse, dirs_exist_ok=True, ignore=ignore)
+
+                copied_dw = True
+            finally:
+                # Removing pid from lock
+                lock.truncate(0)
+                (datawarehouse / '.running').unlink()
+                finish_logging(errors=not copied_dw)
     except pl.LockException:
         with open(cs_lock, 'r') as f:
            pid = int(f.readline())
@@ -145,13 +150,14 @@ def main():
         available_platforms=list(AVAILABLE_PLATFORMS.keys()),
         available_networks=AVAILABLE_NETWORKS)
 
-    config.populate_args(parser.parse_args())
+    args = parser.parse_args()
+    config.args2config(args)
 
-    if config.display_version:
-        print(config.CACHE_SCRIPTS_VERSION)
+    if args.display_version:
+        print(__version__)
         exit(0)
 
-    main_lock(config.datawarehouse)
+    lock_and_run(args)
 
 if __name__ == '__main__':
     main()
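
The restructured `lock_and_run` keeps the earlier copy-on-write flow but wraps it in try/finally so the buffered logs land in the right place even on failure: take the writer lock, snapshot the datawarehouse into a temp directory under a shared lock, run everything against the copy, then publish the result back under an exclusive lock. A condensed sketch of that pattern; the lock-file names are assumptions, since the real `cs_lock`/`p_lock` are defined outside the hunks shown here:

# Copy-on-write update sketch (lock-file names assumed for illustration)
import shutil
import tempfile
from pathlib import Path

import portalocker as pl

def update_datawarehouse(dw: Path, work) -> None:
    dw.mkdir(exist_ok=True)
    cs_lock = dw / '.lock'    # exclusive: one updater at a time
    p_lock = dw / '.p_lock'   # shared with readers such as the dash
    ignore = shutil.ignore_patterns('.lock*', 'logs/*')

    with pl.Lock(cs_lock, 'w', timeout=1), \
         tempfile.TemporaryDirectory(prefix="datawarehouse_") as tmp:
        tmp_dw = Path(tmp)

        # 1. Snapshot the current dw while readers may still be using it
        p_lock.touch(exist_ok=True)
        with pl.Lock(p_lock, 'r', timeout=1, flags=pl.LOCK_SH | pl.LOCK_NB):
            shutil.copytree(dw, tmp_dw, dirs_exist_ok=True, ignore=ignore)

        # 2. Run the collectors against the private copy only
        work(tmp_dw)

        # 3. Publish: copy back while holding the lock exclusively
        with pl.Lock(p_lock, 'w', timeout=10):
            shutil.copytree(tmp_dw, dw, dirs_exist_ok=True, ignore=ignore)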
dao_analyzer/cache_scripts/metadata.py
@@ -7,7 +7,7 @@
 <david@ddavo.me>
 """
 from json.encoder import JSONEncoder
-from typing import Dict
+from typing import Optional
 import json
 from functools import total_ordering
 from datetime import datetime, timezone
@@ -26,7 +26,7 @@ class Block:
         self.id = init["id"] if "id" in init else self.id
 
         if "timestamp" in init:
-            if init["timestamp"].isdigit():
+            if isinstance(init['timestamp'], int) or init["timestamp"].isdigit():
                 self.timestamp = datetime.fromtimestamp(int(init["timestamp"]))
             else:
                 self.timestamp = datetime.fromisoformat(init["timestamp"])
@@ -56,7 +56,7 @@ class Block:
 
 class CollectorMetaData:
     def __init__(self, c: str, d = None):
-        self.block = Block()
+        self.block: Optional[Block] = Block()
         self._collector: str = c
         self.last_update: datetime = datetime.now(timezone.utc)
 
@@ -89,8 +89,8 @@ class MetadataEncoder(JSONEncoder):
 class RunnerMetadata:
     def __init__(self, runner):
         self._path = runner.basedir / 'metadata.json'
-        self.collectorMetaData: Dict[str, CollectorMetaData] = {}
-        self.errors: Dict[str, str] = {}
+        self.collectorMetaData: dict[str, CollectorMetaData] = {}
+        self.errors: dict[str, str] = {}
         self._setPrev()
 
     def _setPrev(self):
@@ -125,7 +125,7 @@ class RunnerMetadata:
             "metadata": self.collectorMetaData,
             "errors": self.errors
         }, f,
-        indent=2 if config.debug else None,
+        indent=2 if config.DEBUG else None,
         cls=MetadataEncoder)
 
     def ifdump(self):
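
The `Block` change matters because a decoded response may carry `timestamp` as a JSON number rather than a string: the 1.2.2 code called `.isdigit()` unconditionally, which raises `AttributeError` on an `int`. An illustrative helper (not part of the package) mirroring the new branch:

from datetime import datetime

def parse_block_timestamp(ts):
    # Mirrors Block's updated logic: int epoch, digit string, or ISO-8601.
    # An int has no .isdigit(), so the isinstance check must come first.
    if isinstance(ts, int) or ts.isdigit():
        return datetime.fromtimestamp(int(ts))
    return datetime.fromisoformat(ts)

print(parse_block_timestamp(1700000000))             # int epoch: crashed in 1.2.2
print(parse_block_timestamp("1700000000"))           # digit string
print(parse_block_timestamp("2023-11-14T22:13:20"))  # ISO-8601 string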
dao_scripts-1.3.0.post1-py3.12-nspkg.pth (new file)
@@ -0,0 +1 @@
+import sys, types, os;p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('dao_analyzer',));importlib = __import__('importlib.util');__import__('importlib.machinery');m = sys.modules.setdefault('dao_analyzer', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('dao_analyzer', [os.path.dirname(p)])));m = m or sys.modules.setdefault('dao_analyzer', types.ModuleType('dao_analyzer'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
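
This `.pth` shim is the legacy setuptools namespace-package bootstrap: executed by `site.py` at interpreter startup, it ensures `sys.modules['dao_analyzer']` exists and that this wheel's `dao_analyzer` directory is on its `__path__`, so `dao_analyzer.cache_scripts` and sibling distributions can share the namespace. Unpacked for readability; the installed file must remain a single line, and the `sitedir` lookup only works when run by `site.addpackage`:

# The one-liner above, unpacked and annotated (not meant to run standalone)
import sys, types, os

# 'sitedir' lives in the calling frame inside site.addpackage
p = os.path.join(sys._getframe(1).f_locals['sitedir'], 'dao_analyzer')

importlib = __import__('importlib.util')  # binds the top-level 'importlib'
__import__('importlib.machinery')

# Reuse an existing 'dao_analyzer' module, or build one from a found spec,
# or fall back to a bare module object
m = sys.modules.setdefault(
    'dao_analyzer',
    importlib.util.module_from_spec(
        importlib.machinery.PathFinder.find_spec('dao_analyzer', [os.path.dirname(p)])
    ),
)
m = m or sys.modules.setdefault('dao_analyzer', types.ModuleType('dao_analyzer'))

# Append this wheel's package directory to the namespace __path__
mp = m.__dict__.setdefault('__path__', [])
if p not in mp:
    mp.append(p)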