dao-scripts 1.3.1.tar.gz → 1.4.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/.github/workflows/ci.yml +4 -0
  2. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/.github/workflows/update-datasets.yml +1 -1
  3. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/CHANGELOG.md +7 -0
  4. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/PKG-INFO +1 -1
  5. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/dao_scripts.egg-info/PKG-INFO +1 -1
  6. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/_version.py +2 -2
  7. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/endpoints.json +2 -2
  8. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/main.py +7 -2
  9. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/utils/uploadDataWarehouse.py +17 -0
  10. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/.envrc +0 -0
  11. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/.gitignore +0 -0
  12. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/LICENSE +0 -0
  13. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/README.md +0 -0
  14. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/dao_scripts.egg-info/SOURCES.txt +0 -0
  15. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/dao_scripts.egg-info/dependency_links.txt +0 -0
  16. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/dao_scripts.egg-info/entry_points.txt +0 -0
  17. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/dao_scripts.egg-info/namespace_packages.txt +0 -0
  18. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/dao_scripts.egg-info/requires.txt +0 -0
  19. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/dao_scripts.egg-info/top_level.txt +0 -0
  20. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/pyproject.toml +0 -0
  21. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/setup.cfg +0 -0
  22. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/setup.py +0 -0
  23. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/__init__.py +0 -0
  24. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/__main__.py +0 -0
  25. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/aragon/__init__.py +0 -0
  26. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/aragon/dao_names.json +0 -0
  27. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/aragon/runner.py +0 -0
  28. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/argparser.py +0 -0
  29. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/common/__init__.py +0 -0
  30. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/common/api_requester.py +0 -0
  31. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/common/blockscout.py +0 -0
  32. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/common/common.py +0 -0
  33. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/common/cryptocompare.py +0 -0
  34. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/common/thegraph.py +0 -0
  35. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/config.py +0 -0
  36. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/daohaus/__init__.py +0 -0
  37. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/daohaus/runner.py +0 -0
  38. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/daostack/__init__.py +0 -0
  39. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/daostack/runner.py +0 -0
  40. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/logging.py +0 -0
  41. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/metadata.py +0 -0
  42. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/src/utils/__init__.py +0 -0
  43. {dao_scripts-1.3.1 → dao_scripts-1.4.1}/test/test_placeholder.py +0 -0
@@ -52,6 +52,10 @@ jobs:
  # IMPORTANT: this permission is mandatory for trusted publishing
  id-token: write
  steps:
+ - name: Skip Duplicate Actions
+   uses: fkirc/skip-duplicate-actions@v5
+   with:
+     concurrent_skipping: same_content_newer
  - name: Download built package
    uses: actions/download-artifact@v4
    with:
@@ -52,7 +52,7 @@ jobs:
  sed -i 's/_prepare_new_version(new_deposition_data\["metadata"\]\["version"\])/_prepare_new_version(new_deposition_data["metadata"].get("version", ""))/g' "$FILE"
  sed -i 's/json=new_deposition_data/json={"metadata": new_deposition_data\["metadata"\]}/g' "$FILE"
  - name: Upload dataset
-   run: dao-utils-upload-dw ${{matrix.repo}}
+   run: dao-utils-upload-dw --debug ${{matrix.repo}}
    env:
      KAGGLE_USERNAME: ${{ secrets.KAGGLE_USERNAME }}
      KAGGLE_KEY: ${{ secrets.KAGGLE_KEY }}
@@ -1,6 +1,13 @@
  # Changelog
  All notable changes to this project will be documented in this file.

+ ## 1.4.1 - 2024-06-03
+ - Added debug logging to `dao-utils-upload-dw`
+
+ ## 1.4.0 - 2024-06-03
+ - Fixed bug when process was killed
+ - Updated endpoint network names
+
  ## 1.3.1 - 2024-05-27
  - Fixed bug with `--delete-force`

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dao-scripts
- Version: 1.3.1
+ Version: 1.4.1
  Summary: "A tool to download data to monitor DAO activity"
  Home-page: https://github.com/Grasia/dao-scripts
  Author: David Davó
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dao-scripts
- Version: 1.3.1
+ Version: 1.4.1
  Summary: "A tool to download data to monitor DAO activity"
  Home-page: https://github.com/Grasia/dao-scripts
  Author: David Davó
@@ -12,5 +12,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '1.3.1'
- __version_tuple__ = version_tuple = (1, 3, 1)
+ __version__ = version = '1.4.1'
+ __version_tuple__ = version_tuple = (1, 4, 1)
@@ -15,11 +15,11 @@
    "daostack": "Bnqq6ARphxg2rnAvjBrQGxCPNtDGiygv235GbRzZqwfb",
    "daohaus": "2GJY9uxsLQUCvgqSfy6QCLAJgM9P9kdxBUpwNcGs7nPR"
  },
- "polygon": {
+ "matic": {
    "_blocks": "4mJTujCUWKLd4SPBYE1YXbamfNSNRMyphk8tHzczYWf9",
    "daohaus": "AkzZk4BsvfNRkRDtzz7Bc8TgktTJN4uv9tgfCehhE6fB"
  },
- "arbitrum": {
+ "arbitrum-one": {
    "_blocks": "AyY1GDtmAyavGWsdoHQ4vNoBCVWSqfDwHa6jPu5KdHgP",
    "daohaus": "2c41cggebRCMzFiDqoqDwShZtz4xYucsFKbQnEiXUTzY"
  },
@@ -83,12 +83,17 @@ def lock_and_run(args: Namespace):
  with pl.Lock(cs_lock, 'w', timeout=1) as lock, \
       tempfile.TemporaryDirectory(prefix="datawarehouse_") as tmp_dw_str:

+     running_link = datawarehouse / '.running'
+     if running_link.exists():
+         print("Program was killed, removing aux files")
+         running_link.unlink()
+
      # Writing pid and dir name to lock (debugging)
      tmp_dw = Path(tmp_dw_str)
      print(os.getpid(), file=lock)
      print(tmp_dw, file=lock)
      lock.flush()
-     (datawarehouse / '.running').symlink_to(tmp_dw)
+     running_link.symlink_to(tmp_dw)
@@ -135,7 +140,7 @@ def lock_and_run(args: Namespace):
      finally:
          # Removing pid from lock
          lock.truncate(0)
-         (datawarehouse / '.running').unlink()
+         running_link.unlink()
          finish_logging(errors=not copied_dw)
  except pl.LockException:
      with open(cs_lock, 'r') as f:
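
The `.running` guard added above recovers from a run that was killed before the `finally` block could remove the marker symlink (the CHANGELOG's "Fixed bug when process was killed"); once the stale marker is gone, the symlink is recreated pointing at the fresh temporary directory. A minimal standalone sketch of the same pattern; `run_with_marker` is a hypothetical wrapper standing in for the package's `lock_and_run`:

```python
import tempfile
from pathlib import Path

def run_with_marker(datawarehouse: Path) -> None:
    """Illustrative sketch of the '.running' marker handling shown above; not the package's code."""
    running_link = datawarehouse / '.running'

    # A leftover marker means the previous run was killed before its
    # finally-block could clean up: remove the stale symlink first.
    if running_link.exists():
        print("Program was killed, removing aux files")
        running_link.unlink()

    with tempfile.TemporaryDirectory(prefix="datawarehouse_") as tmp_dw_str:
        tmp_dw = Path(tmp_dw_str)
        running_link.symlink_to(tmp_dw)   # mark this run as in progress
        try:
            pass  # ... collect data into tmp_dw ...
        finally:
            running_link.unlink()         # a clean exit removes the marker
```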
@@ -7,6 +7,7 @@ import shutil
  import json
  import requests
  from time import sleep
+ import logging

  from tqdm import tqdm

@@ -122,11 +123,27 @@ def main():
      default=5,
      help="Zenodo is known to return 504 error, this program will try and upload it again",
  )
+ parser.add_argument(
+     '-D', '--debug',
+     action='store_true',
+     help='Enable debug logs',
+ )

  args = parser.parse_args()
  if args.repos == 'all':
      args.repos = available_repos

+ if args.debug:
+     from http.client import HTTPConnection
+
+     # https://requests.readthedocs.io/en/latest/api/#api-changes
+     HTTPConnection.debuglevel = 1
+     logging.basicConfig()
+     logging.getLogger().setLevel(logging.DEBUG)
+     requests_log = logging.getLogger('urllib3')
+     requests_log.setLevel(logging.DEBUG)
+     requests_log.propagate = True
+
  with tempfile.TemporaryDirectory() as tmpdir:
      tmpdir = Path(tmpdir)

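
The new `--debug` flag uses the approach the requests documentation describes for wire-level debugging: setting `http.client.HTTPConnection.debuglevel = 1` prints request and response headers, and raising the `urllib3` logger to DEBUG surfaces connection-pool activity. A minimal standalone reproduction of that wiring (the example URL is arbitrary and only there to trigger output):

```python
import logging
from http.client import HTTPConnection

import requests

def enable_http_debug() -> None:
    """Mirror the --debug wiring: dump HTTP headers plus urllib3 DEBUG records."""
    HTTPConnection.debuglevel = 1          # http.client prints send()/reply headers to stdout
    logging.basicConfig()                  # give the root logger a handler
    logging.getLogger().setLevel(logging.DEBUG)
    requests_log = logging.getLogger('urllib3')
    requests_log.setLevel(logging.DEBUG)
    requests_log.propagate = True

if __name__ == '__main__':
    enable_http_debug()
    requests.get('https://example.org')    # any request now logs headers and pool messages
```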