gsrap 0.9.0__tar.gz → 0.10.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {gsrap-0.9.0 → gsrap-0.10.1}/PKG-INFO +5 -2
- {gsrap-0.9.0 → gsrap-0.10.1}/pyproject.toml +12 -12
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/.ipynb_checkpoints/__init__-checkpoint.py +6 -5
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/__init__.py +6 -5
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/assets/kegg_compound_to_others.pickle +0 -0
- gsrap-0.10.1/src/gsrap/assets/kegg_reaction_to_others.pickle +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/biomass-checkpoint.py +3 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/downloads-checkpoint.py +168 -93
- gsrap-0.10.1/src/gsrap/commons/.ipynb_checkpoints/escherutils-checkpoint.py +112 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/excelhub-checkpoint.py +7 -1
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/metrics-checkpoint.py +8 -8
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/biomass.py +3 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/downloads.py +168 -93
- gsrap-0.10.1/src/gsrap/commons/escherutils.py +112 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/excelhub.py +7 -1
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/metrics.py +8 -8
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/.ipynb_checkpoints/mkmodel-checkpoint.py +2 -2
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/mkmodel.py +2 -2
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/.ipynb_checkpoints/annotation-checkpoint.py +43 -18
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/.ipynb_checkpoints/completeness-checkpoint.py +2 -1
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/.ipynb_checkpoints/introduce-checkpoint.py +132 -63
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/.ipynb_checkpoints/manual-checkpoint.py +23 -3
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/.ipynb_checkpoints/parsedb-checkpoint.py +59 -49
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/.ipynb_checkpoints/repeating-checkpoint.py +90 -53
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/annotation.py +43 -18
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/completeness.py +2 -1
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/introduce.py +132 -63
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/manual.py +22 -2
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/parsedb.py +59 -49
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/repeating.py +90 -53
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/.ipynb_checkpoints/runsims-checkpoint.py +2 -1
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/.ipynb_checkpoints/simplegrowth-checkpoint.py +0 -1
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/runsims.py +2 -1
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/simplegrowth.py +0 -1
- gsrap-0.9.0/src/gsrap/assets/kegg_reaction_to_others.pickle +0 -0
- gsrap-0.9.0/src/gsrap/commons/.ipynb_checkpoints/escherutils-checkpoint.py +0 -108
- gsrap-0.9.0/src/gsrap/commons/escherutils.py +0 -108
- {gsrap-0.9.0 → gsrap-0.10.1}/LICENSE.txt +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/README.md +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/assets/.ipynb_checkpoints/PM1-checkpoint.csv +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/assets/.ipynb_checkpoints/PM2A-checkpoint.csv +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/assets/.ipynb_checkpoints/PM3B-checkpoint.csv +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/assets/.ipynb_checkpoints/PM4A-checkpoint.csv +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/assets/PM1.csv +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/assets/PM2A.csv +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/assets/PM3B.csv +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/assets/PM4A.csv +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/assets/__init__.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/__init__-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/coeffs-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/figures-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/fluxbal-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/keggutils-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/logutils-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/medium-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/memoteutils-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/.ipynb_checkpoints/sbmlutils-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/__init__.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/coeffs.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/figures.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/fluxbal.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/keggutils.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/logutils.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/medium.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/memoteutils.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/commons/sbmlutils.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/getmaps/.ipynb_checkpoints/__init__-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/getmaps/.ipynb_checkpoints/getmaps-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/getmaps/.ipynb_checkpoints/kdown-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/getmaps/__init__.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/getmaps/getmaps.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/getmaps/kdown.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/.ipynb_checkpoints/__init__-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/.ipynb_checkpoints/biologcuration-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/.ipynb_checkpoints/gapfill-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/.ipynb_checkpoints/gapfillutils-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/.ipynb_checkpoints/polishing-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/.ipynb_checkpoints/pruner-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/__init__.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/biologcuration.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/gapfill.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/gapfillutils.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/polishing.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/mkmodel/pruner.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/.ipynb_checkpoints/__init__-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/.ipynb_checkpoints/cycles-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/__init__.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/parsedb/cycles.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/.ipynb_checkpoints/__init__-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/.ipynb_checkpoints/biosynth-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/.ipynb_checkpoints/cnps-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/.ipynb_checkpoints/essentialgenes-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/.ipynb_checkpoints/growthfactors-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/.ipynb_checkpoints/precursors-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/.ipynb_checkpoints/singleomission-checkpoint.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/__init__.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/biosynth.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/cnps.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/essentialgenes.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/growthfactors.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/precursors.py +0 -0
- {gsrap-0.9.0 → gsrap-0.10.1}/src/gsrap/runsims/singleomission.py +0 -0
@@ -1,8 +1,9 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: gsrap
-Version: 0.
+Version: 0.10.1
 Summary:
 License: GNU General Public License v3.0
+License-File: LICENSE.txt
 Author: Gioele Lazzari
 Requires-Python: >=3.9
 Classifier: License :: Other/Proprietary License

@@ -12,9 +13,11 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
 Requires-Dist: biopython (>=1.80)
 Requires-Dist: cobra (>=0.29)
 Requires-Dist: colorlog (>=6.9.0)
+Requires-Dist: escher (>=1.8.1)
 Requires-Dist: gdown (>=5.2.0)
 Requires-Dist: gempipe (>=1.38.1)
 Requires-Dist: matplotlib (>=3.9.0)

@@ -1,24 +1,24 @@
 [tool.poetry]
 name = "gsrap"
-version = "0.
+version = "0.10.1"
 description = ""
 authors = ["Gioele Lazzari"]
 license = "GNU General Public License v3.0"
 readme = "README.md"
 
 [tool.poetry.dependencies]
-python = ">=3.9"
-cobra = ">=0.29"
-memote = ">=0.17.0"
-pandas = ">=2.0.0"
-openpyxl = ">=3.1.0"
-biopython = ">=1.80"
+python = ">=3.9"
+cobra = ">=0.29"
+memote = ">=0.17.0"
+pandas = ">=2.0.0"
+openpyxl = ">=3.1.0"
+biopython = ">=1.80"
 gempipe = ">=1.38.1"
-gdown = ">=5.2.0"
-colorlog = ">=6.9.0"
-matplotlib = ">=3.9.0"
-xlsxwriter = ">=3.1.0"
-
+gdown = ">=5.2.0"
+colorlog = ">=6.9.0"
+matplotlib = ">=3.9.0"
+xlsxwriter = ">=3.1.0"
+escher = ">=1.8.1"
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]

@@ -162,11 +162,12 @@ def main():
 
 
     # print the full command line:
-
-
-
-
-
+    if args.verbose:
+        command_line = ''
+        for arg, value in vars(args).items():
+            if arg == 'subcommand': command_line = command_line + f"gsrap {value} "
+            else: command_line = command_line + f"--{arg} {value} "
+        logger.info('Inputted command line: "' + command_line.rstrip() + '".\n')
 
 
 

@@ -162,11 +162,12 @@ def main():
 
 
    # print the full command line:
-
-
-
-
-
+    if args.verbose:
+        command_line = ''
+        for arg, value in vars(args).items():
+            if arg == 'subcommand': command_line = command_line + f"gsrap {value} "
+            else: command_line = command_line + f"--{arg} {value} "
+        logger.info('Inputted command line: "' + command_line.rstrip() + '".\n')
 
 
 

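The block added above (identically in `src/gsrap/__init__.py` and its notebook checkpoint) reconstructs the invoked command line from the parsed argparse namespace when `--verbose` is set. A minimal standalone sketch of the same pattern, using a hypothetical parser with only a `subcommand` positional and a `--verbose` flag (the real gsrap CLI defines more options):

```python
import argparse

# hypothetical parser; the real gsrap CLI wires up its own subcommands and options
parser = argparse.ArgumentParser(prog='gsrap')
parser.add_argument('subcommand')
parser.add_argument('--verbose', action='store_true')
args = parser.parse_args(['parsedb', '--verbose'])

if args.verbose:
    command_line = ''
    for arg, value in vars(args).items():
        if arg == 'subcommand': command_line += f"gsrap {value} "
        else: command_line += f"--{arg} {value} "
    print('Inputted command line: "' + command_line.rstrip() + '".')
    # prints: Inputted command line: "gsrap parsedb --verbose True".
```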
Binary file

Binary file

@@ -5,6 +5,7 @@ import time
 import tempfile
 import json
 import glob
+import sys
 
 
 import gdown

@@ -12,6 +13,30 @@ import pandas as pnd
 
 
 
+
+class SimpleLoadingWheel():
+    # a simple endless loading wheel
+
+    msg = ''
+    wheel_figures = ['|', '/', '-', '\\']
+    wheel_cnt = 0
+
+    def __init__(self, msg='Please wait... '):
+        self.msg = msg
+
+    def proceed(self):
+        print(f"{self.msg}{self.wheel_figures[self.wheel_cnt]}", file=sys.stderr, end='\r')
+        self.wheel_cnt += 1
+        if self.wheel_cnt == 4: self.wheel_cnt = 0
+        time.sleep(0.5)
+
+    def clear(self):
+        clear_str = ''.join([' ' for i in range(len(self.msg)+1)])
+        print(f'{clear_str}', file=sys.stderr, end='\r')
+        wheel_cnt = 0
+
+
+
 def get_dbuni(logger):
 
 

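`SimpleLoadingWheel`, introduced here in `commons/downloads.py`, prints a spinner to stderr while background threads run; later hunks poll it from `get_databases()` and from the new `escherutils` module. A minimal usage sketch with a dummy worker standing in for the real download threads (slightly condensed from the class in the diff):

```python
import sys
import time
import threading

class SimpleLoadingWheel():
    # condensed from the class added in the diff
    wheel_figures = ['|', '/', '-', '\\']
    def __init__(self, msg='Please wait... '):
        self.msg = msg
        self.wheel_cnt = 0
    def proceed(self):
        print(f"{self.msg}{self.wheel_figures[self.wheel_cnt]}", file=sys.stderr, end='\r')
        self.wheel_cnt = (self.wheel_cnt + 1) % len(self.wheel_figures)
        time.sleep(0.5)
    def clear(self):
        print(' ' * (len(self.msg) + 1), file=sys.stderr, end='\r')

def dummy_download():        # stand-in for the gdown-based downloads
    time.sleep(3)

t = threading.Thread(target=dummy_download)
t.start()
slw = SimpleLoadingWheel()
while t.is_alive():          # same polling loop as get_databases()
    slw.proceed()
slw.clear()
```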
@@ -29,7 +54,7 @@ def get_dbuni(logger):
 
     # check table presence
     sheet_names = exceldb.sheet_names
-    for i in ['T', 'R', 'M', '
+    for i in ['T', 'R', 'M', 'curators']:
         if i not in sheet_names:
            logger.error(f"Sheet '{i}' is missing!")
            return 1

@@ -40,15 +65,15 @@ def get_dbuni(logger):
     dbuni['T'] = exceldb.parse('T')
     dbuni['R'] = exceldb.parse('R')
     dbuni['M'] = exceldb.parse('M')
-    dbuni['
+    dbuni['curators'] = exceldb.parse('curators')
 
 
     # check table headers
     headers = {}
-    headers['T'] = ['rid', 'rstring', 'kr', 'gpr_manual', 'name', '
-    headers['R'] = ['rid', 'rstring', 'kr', 'gpr_manual', 'name', '
-    headers['M'] = ['pure_mid', 'formula', 'charge', 'kc', 'name', 'inchikey', '
-    headers['
+    headers['T'] = ['rid', 'rstring', 'kr', 'gpr_manual', 'name', 'curator', 'notes']
+    headers['R'] = ['rid', 'rstring', 'kr', 'gpr_manual', 'name', 'curator', 'notes']
+    headers['M'] = ['pure_mid', 'formula', 'charge', 'kc', 'name', 'inchikey', 'curator', 'notes']
+    headers['curators'] = ['username', 'first_name', 'last_name', 'role', 'mail']
     for i in dbuni.keys():
         if set(dbuni[i].columns) != set(headers[i]):
            logger.error(f"Sheet '{i}' is missing the columns {set(headers[i]) - set(dbuni[i].columns)}.")

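With this change the universal database workbook must carry a `curators` sheet (`username`, `first_name`, `last_name`, `role`, `mail`) next to `T`, `R` and `M`, and each of those sheets now has `curator`/`notes` columns. A quick way to validate a workbook against that layout, sketched with pandas (the path is hypothetical, the column lists are copied from the diff):

```python
import pandas as pnd

expected = {
    'T': ['rid', 'rstring', 'kr', 'gpr_manual', 'name', 'curator', 'notes'],
    'R': ['rid', 'rstring', 'kr', 'gpr_manual', 'name', 'curator', 'notes'],
    'M': ['pure_mid', 'formula', 'charge', 'kc', 'name', 'inchikey', 'curator', 'notes'],
    'curators': ['username', 'first_name', 'last_name', 'role', 'mail'],
}

exceldb = pnd.ExcelFile('dbuni.xlsx')   # hypothetical workbook path
for sheet, cols in expected.items():
    if sheet not in exceldb.sheet_names:
        print(f"Sheet '{sheet}' is missing!")
        continue
    missing = set(cols) - set(exceldb.parse(sheet).columns)
    if missing:
        print(f"Sheet '{sheet}' is missing the columns {missing}.")
```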
@@ -136,40 +161,141 @@ def get_dbexp(logger):
 
 
 
-def get_eschermap(logger):
-
-
+def get_eschermap(logger, map_id):
+
+
+    dict_escher_maps = {
+        'gr_other_reactions': '1un4Nqpc7l-U3k8iX67osIqM_I6AntWDy',
+        'gr_sinks_demands': '1lKS2_kXhdBWTFY_vsABRBfPUH3SidgQK',
+        'gr_unsaturates_w7': '1jh5quSsiqJ6PecvlWRRp00s9Uj-96SBK',
+        'gr_transport': '180QXr_e99GmYTUFtmV-Btw8-O7C7VeRO',
+        'map00010': '1mTMwrnNd1n4j7SHAwJ86EtOtIt9NOqp6',
+        'map00020': '1J7bHRZv1OU6E8JkpW6JxThWp-p0sqS-N',
+        'map00030': '121nngh-p29TQrvlDeVOREhVaOZ0Qe94H',
+        'map00040': '17e-e4z5pYrLG0sJRhY1lll183MAUmfeC',
+        'map00051': '1A6lpAuilGP7VvfLh_9PsqcE23YM0_RZA',
+        'map00052': '1ZDrLZIsCxK2-2rDuYbZck_ls5g-VTYAS',
+        'map00053': '1eluZkBAA3_SSP625refRJQLAp0lSGJSq',
+        'map00061': '1RDgv4yLkpYh9p2Zy_OPDc7kmJNATHpJY',
+        'map00071': '1ieltgCeNZT11cTHQ_uvWJVtfrjZ8O-yB',
+        'map00100': '1a7WAA3y9J8gQmwuM7nDfmPABxi5M3IGU',
+        'map00130': '1OXvBIbZzz4vSgrVpfXeQsxg74N5tqSNE',
+        'map00190': '1j0SQYt5fNMCvKI3GFc1cOU0YGDyWidS5',
+        'map00195': '1ceRpKB3JuxqHVrAVRVADX9sjcLPrwsoa',
+        'map00220': '1ntN8R9NiG8kNjUntPqziIblhxoZgSR7N',
+        'map00230': '1MVWZQs4V6jBk6kYv4kPait5SoBT7ZRmA',
+        'map00240': '1-n8jzvfapLDGEvCF_jmVMhbBVy7UHf-z',
+        'map00250': '1qh0dk4-bvml3FVLb5Qbw--GM0t4r-NwX',
+        'map00260': '18l-dMc2a_uEgeSY3RcTQdScAIkvstpQo',
+        'map00270': '1TGKhD3rnPgbPzL_NPR0NpO8CkWU4mTFE',
+        'map00280': '1HSP6UibDxw5KaE0Q7LZR09OYvt8ie50R',
+        'map00290': '1eObr-DGu1FSUZC0wh18UmDfgKbdpc4ez',
+        'map00300': '1fgK9prq7Z-xskFtKSsp_nBW_avBTeKe3',
+        'map00310': '1lKxkgnzXHdPZhT7QJN7Ohf7UMJix74Sl',
+        'map00330': '1R-4vMiHQNyyZkpID0DgfunPCuVGhTz5g',
+        'map00340': '16g1_a5aDs04ZrMNrEJCc5CdpgFcJGiDZ',
+        'map00350': '1kkbmpmQY3f9zqy2GNdZKPd9HOz6MrnGc',
+        'map00360': '1BC4IeVM6Px2bWleGmcX-h109RI-9KvhC',
+        'map00380': '1ZfEK2U87gW77y1sosDTHlavJ66PLRgRY',
+        'map00400': '1PKmNTpnfUaydtLQvGrJtP8LH7tj1Ka-Z',
+        'map00450': '106flHuSeJID56rhYeJ5zJgDh8owfxW2K',
+        'map00460': '1QBPL7deHN998Maj25LpVrrEB0l6_y4Q9',
+        'map00470': '1u67PtRrS2xy8clSLt2daxiwO1j0wScRL',
+        'map00480': '12p9hrG_xw-XigK4euy4-EDH8JH1t8U4c',
+        'map00500': '1erMsLa2zcZUxizPiALZN4GmuaZSpmElF',
+        'map00520': '1K9mU6M99_hFeOc8x_rYDpWPpVO-6NCN3',
+        'map00541': '1CDSQ8jqaSgdxAqMUIU6DsZnZXjXj7fe9',
+        'map00543': '1MkO45ap63_7ViJ-i8KtoEDl9q5ueXfJM',
+        'map00550': '1HCvMcqThZqTZXa2ErL3ZpcJ86gBw2Sh3',
+        'map00552': '1GcRd4ADgLVRg-RuI-6CklkNao4EgVL5s',
+        'map00561': '17_hAqfz89xBX3f_eUkGefEFiqZ0xS-hL',
+        'map00562': '1HxBd0LbZaqbrnjhpxxK7lEQGlW7HN3bU',
+        'map00564': '1XI3tcJgJbBDkDj6DVBAuzZ-Ok6uK_TfB',
+        'map00620': '1e9lGVCYs3nt-LgTPY55vLeeQtWk4Zpna',
+        'map00630': '1LXW-Htoh25wAWXdrWu6wTnegxuK9BNy5',
+        'map00640': '1kjummzfu7lgy91QVZr3IV_coLFjU9nsS',
+        'map00650': '1fBOBJzPpTmNIUnpsSeM3JxSUTqHVFJ00',
+        'map00660': '1N9HbppTMcFTWTfBQhI1bF_SxbWhkGlKe',
+        'map00670': '12jolzJP2sOUnL1z8MPGJy4dr3Jz4K0y0',
+        'map00680': '1wopxju_tgy37Lb5uO7xkcE5hiUgppzJ2',
+        'map00710': '1HF1-j4h76nPgq-x0foqNmPj9Icb3CJet',
+        'map00720': '1v40fQu6ByNq7g2_qiZ9qucBXYJPeKvpK',
+        'map00730': '1Spryv2M2eiPXF55dZoNb1_nv_f0oxN1v',
+        'map00740': '1WJ6iydXKLb2s1K8_F8OCIIurDBbGNAfM',
+        'map00750': '1Z_7Z4710P738MOQJFuExjz7Nsff0_EZk',
+        'map00760': '1PKJs7sZ2xvdHQAdz_E4faFugaownqXxw',
+        'map00770': '1nbbTwRtUF4vOxwMUSwcyU1K4ltgLNVbx',
+        'map00780': '1HUvLt4_xK_pnGbqiz8UjvZyvaGIomAxf',
+        'map00790': '13r-w61f0zPsDnpUHqJQU_r8ZXvE-cxhx',
+        'map00791': '1SX8WSG6zUOf144RhAT9QycDrjEskZi4w',
+        'map00860': '18DqnA4rNfDp2pCNyvwRg4XoTS8KefFAc',
+        'map00900': '1Xm9g8dTDJWJmQ7AY9PPybefcE6G3smTh',
+        'map00906': '1jnMtXsHVXipy9ApialPNFQDW_GCgpKas',
+        'map00910': '196D351gJ10416ZI6y1rJGLrmxCBiHLkI',
+        'map00920': '1tnm0WGewYgINyvrj-A-iv2tO--bAKwxV',
+        'map00930': '1UycN6OXFGR3g4Vwfz5zQXHmdaAJmqpox',
+        'map00970': '1-bDiZJNxkM6JhcUpbI44O3kWvhTSYlGq',
+    }
+
+
+
+    # return None if not requested
+    if map_id == None or map_id == '-':
+        return None
+
+
+    # check the esistance of the map
+    if map_id not in dict_escher_maps.keys():
+        logger.warning(f"No online folder is tracked for '{map_id}' yet. Please contact the developer to start tracking '{map_id}'.")
+        return None
+
+
+    # object to be returned
     lastmap = dict()
 
 
-    folder_id = "1YE4l8IFL9pRgonAmFCf2SMRnpGJCHjil"
-    folder_url = f"https://drive.google.com/drive/folders/{folder_id}?usp=sharing"
-
-
     # the temporary folder (/tmp/on-the-fly-code) will be deleted once exiting the with statement
     with tempfile.TemporaryDirectory() as tmp_dir:
 
 
-        # get available versions without downloading:
+        # get available versions without downloading:
+        folder_id = dict_escher_maps[map_id]
+        folder_url = f"https://drive.google.com/drive/folders/{folder_id}?usp=sharing"
         contents = gdown.download_folder(folder_url, output=tmp_dir, quiet=True, skip_download=True)
 
+        # check empty folder
         if len(contents) == 0:
-            logger.
-            return
+            logger.warning(f"Online folder for '{map_id}' seems empty. Please draw and upload '{map_id}-v1.json'.")
+            return None
 
+        # check crowded folder
+        if len(contents) > 40:
+            logger.warning(f"Online folder for '{map_id}' contains >40 versions. An error will be raised at 50 versions. Please free space by moving older versions into 'older/' now.")
 
-        #
+        # create dict of available files:
         files = {i.path: i.id for i in contents}
-
-
+
+        # check file name consistency
+        map_ids = set([i.rsplit('-v',1)[0] for i in files.keys()])
+        if len(map_ids) != 1:
+            logger.error(f"Several different map names in online folder for '{map_id}'.")
+            return 1
+
+        # check concordance to mother folder
+        if list(map_ids)[0] != map_id:
+            logger.error(f"Online folder for '{map_id}' contains versions for another map.")
+            return 1
+
+        # get the last available version (natural order)
+        last_version = max([int(i.rsplit('-v',1)[-1].replace('.json', '')) for i in files.keys()])
+        last_file_name = list(files.keys())[0].rsplit('-v',1)[0] + f"-v{last_version}.json"
         last_file_id = files[last_file_name]
         lastmap['filename'] = last_file_name
 
-
         # download last version:
         try: pathfile_tmpfolder = gdown.download(id=last_file_id, output=f"{tmp_dir}/{last_file_name}", quiet=True)
         except:
-            logger.error("Downloading of last-version
+            logger.error(f"Downloading of last-version for '{map_id}'. Retry. If persists, please contact the developer.")
             return 1
 
         # load json

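`get_eschermap()` now receives a `map_id`, resolves it to a per-map Google Drive folder through `dict_escher_maps`, and then selects the highest `<map_id>-vN.json` among the folder contents by parsing `N` as an integer. A small sketch of that version-selection step on plain filenames (the listing below is made up):

```python
# made-up folder listing, as the filenames would come back from
# gdown.download_folder(..., skip_download=True)
filenames = ['map00010-v1.json', 'map00010-v3.json', 'map00010-v12.json']

# same parsing as in the diff: split at the last '-v', strip '.json', compare as integers
last_version = max(int(f.rsplit('-v', 1)[-1].replace('.json', '')) for f in filenames)
last_file_name = filenames[0].rsplit('-v', 1)[0] + f"-v{last_version}.json"

print(last_file_name)   # -> map00010-v12.json (numeric, not lexicographic, ordering)
```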
@@ -182,34 +308,44 @@ def get_eschermap(logger):
 
 
 
-def get_databases(logger):
-
+def get_databases(logger, map_id):
 
-
-
+
+    # define a function to fill the dict
+    def run_with_result(func, logger, results_dict, key, *extra_args):
+        result = func(logger, *extra_args)
         results_dict[key] = result
 
 
+    # start threads
     results_dict = dict()
-    t1 = threading.Thread(target=run_with_result, args=(
-
-
+    t1 = threading.Thread(target=run_with_result, args=(
+        get_dbuni, logger, results_dict, 'dbuni'))
+    t2 = threading.Thread(target=run_with_result, args=(
+        get_dbexp, logger, results_dict, 'dbexp'))
+    t3 = threading.Thread(target=run_with_result, args=(
+        get_eschermap, logger, results_dict, 'eschermap', map_id))
 
 
-    # wait for the longest download
+    # wait for the longest download:
     t1.start()
     t2.start()
     t3.start()
+    slw = SimpleLoadingWheel(msg="Please wait... ")
     while t1.is_alive() or t2.is_alive() or t3.is_alive():
-
-
-
+        slw.proceed()
+    slw.clear()
+
+
+    # check if errors where raised:
     if type(results_dict['dbuni'])==int:
         return 1
     if type(results_dict['dbexp'])==int:
         return 1
     if type(results_dict['eschermap'])==int:
         return 1
+
+
     return (results_dict['dbuni'], results_dict['dbexp'], results_dict['eschermap'])
 
 

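`get_databases()` keeps downloading the three resources in parallel, but the thread targets now go through a small `run_with_result` wrapper so each worker can drop its return value into a shared dict and `get_eschermap` can receive the extra `map_id` argument. A generic sketch of that pattern with dummy workers:

```python
import threading
import time

def run_with_result(func, results_dict, key, *extra_args):
    # store the worker's return value under the given key
    results_dict[key] = func(*extra_args)

def slow_square(x):   # dummy stand-in for get_dbuni / get_dbexp / get_eschermap
    time.sleep(1)
    return x * x

results = {}
threads = [
    threading.Thread(target=run_with_result, args=(slow_square, results, 'a', 2)),
    threading.Thread(target=run_with_result, args=(slow_square, results, 'b', 3)),
]
for t in threads: t.start()
for t in threads: t.join()
print(results)   # -> {'a': 4, 'b': 9}
```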
@@ -277,65 +413,4 @@ def check_taxon(logger, taxon, idcollection_dict):
         return 1
 
 
-    """
-    sorted(list(df.query("kingdom == 'Bacteria'")['phylum'].unique()))
-    ['Acidobacteriota',
-    'Actinomycetota',
-    'Alphaproteobacteria',
-    'Aquificota',
-    'Armatimonadota',
-    'Atribacterota',
-    'Bacilli',
-    'Bacteria incertae sedis',
-    'Bacteroidota',
-    'Balneolota',
-    'Bdellovibrionota',
-    'Betaproteobacteria',
-    'Caldisericota',
-    'Calditrichota',
-    'Campylobacterota',
-    'Chlamydiota',
-    'Chlorobiota',
-    'Chloroflexota',
-    'Chrysiogenota',
-    'Cloacimonadota',
-    'Clostridia',
-    'Coprothermobacterota',
-    'Cyanobacteriota',
-    'Deferribacterota',
-    'Deinococcota',
-    'Deltaproteobacteria',
-    'Dictyoglomota',
-    'Elusimicrobiota',
-    'Enterobacteria',
-    'Fibrobacterota',
-    'Fidelibacterota',
-    'Fusobacteriota',
-    'Gemmatimonadota',
-    'Ignavibacteriota',
-    'Kiritimatiellota',
-    'Lentisphaerota',
-    'Melainabacteria',
-    'Mycoplasmatota',
-    'Myxococcota',
-    'Nitrospinota',
-    'Nitrospirota',
-    'Omnitrophota',
-    'Planctomycetota',
-    'Rhodothermota',
-    'Spirochaetota',
-    'Synergistota',
-    'Thermodesulfobacteriota',
-    'Thermodesulfobiota',
-    'Thermomicrobiota',
-    'Thermosulfidibacterota',
-    'Thermotogota',
-    'Verrucomicrobiota',
-    'Vulcanimicrobiota',
-    'other Bacillota',
-    'other Gammaproteobacteria',
-    'other Pseudomonadota',
-    'unclassified Bacteria']
-    """
-
     return 0

@@ -0,0 +1,112 @@
+import warnings
+import logging
+import threading
+
+import cobra
+
+from .downloads import SimpleLoadingWheel
+
+
+
+def print_json_tree(data, level=0, max_level=2):
+    # explore contents of a json object
+
+    if level > max_level:
+        return
+    indent = ' ' * level
+    if isinstance(data, dict):
+        for key, value in data.items():
+            print(f"{indent}{key}")
+            print_tree(value, level + 1, max_level)
+    elif isinstance(data, list):
+        for i, item in enumerate(data):
+            print(f"{indent}[{i}]")
+            print_tree(item, level + 1, max_level)
+
+
+
+def subset_for_focus(universe, rids_in_group, outdir, focus):
+
+    universe_focus = universe.copy()
+    to_remove = [r for r in universe_focus.reactions if r.id not in rids_in_group]
+
+
+    # trick to avoid the WARNING "cobra/core/group.py:147: UserWarning: need to pass in a list"
+    # triggered when trying to remove reactions that are included in groups.
+    with warnings.catch_warnings():  # temporarily suppress warnings for this block
+        warnings.simplefilter("ignore")  # ignore all warnings
+        cobra_logger = logging.getLogger("cobra.util.solver")
+        old_level = cobra_logger.level
+        cobra_logger.setLevel(logging.ERROR)
+
+        universe_focus.remove_reactions(to_remove, remove_orphans=True)
+
+        # restore original behaviour:
+        cobra_logger.setLevel(old_level)
+
+
+    # save the subset for drawing in Escher!
+    cobra.io.save_json_model(universe_focus, f'{outdir}/focus_{focus}.json')
+
+
+
+def count_undrawn_rids_focus(logger, universe, lastmap, focus, outdir):
+
+
+    # there could be no tracked folder / no versions for this group
+    if lastmap == None:
+        return
+
+
+    # get modeled reads for this --focus:
+    rids_in_group = set()
+    try: gr = universe.groups.get_by_id(focus)
+    except:
+        logger.warning(f"Group '{focus}' not found!")
+        return
+    for r in gr.members:
+        rids_in_group.add(r.id)
+
+
+    # get rids on Escher:
+    drawn_rids = set()
+    for key, value in lastmap['json'][1]['reactions'].items():
+        drawn_rids.add(value['bigg_id'])
+
+
+    # get remaining rids for this map:
+    remainings = rids_in_group - drawn_rids
+    remainings_krs = set()
+    for rid in remainings:
+        r = universe.reactions.get_by_id(rid)
+        if 'kegg.reaction' in r.annotation.keys():
+            krs = r.annotation['kegg.reaction']
+            for kr in krs:
+                remainings_krs.add(kr)
+
+
+    if len(remainings) > 0:
+        if focus != 'gr_transport':
+            logger.warning(f"Current '{lastmap['filename']}' is {len(remainings)} reactions behind: {' '.join(list(remainings_krs))}.")
+        else:
+            logger.warning(f"Current '{lastmap['filename']}' is {len(remainings)} reactions behind.")  # usually no kegg codes for tranport reactions
+
+
+        # subset the universe to ease the drawing:
+        next_version_filename = f"{focus}-v{(int(lastmap['filename'].rsplit('-v',1)[-1].replace('.json', ''))+1)}.json"
+        logger.warning(f"Writing model '{outdir}/focus_{focus}.json' to ease the drawing of '{next_version_filename}'...")
+
+
+        t1 = threading.Thread(target = subset_for_focus, args=(
+            universe, rids_in_group, outdir, focus))
+        t1.start()
+        slw = SimpleLoadingWheel(msg="Please wait... ")
+        while t1.is_alive():
+            slw.proceed()
+        slw.clear()
+
+
+        logger.warning(f"'{outdir}/focus_{focus}.json' created!")
+    else:
+        logger.info(f"Current '{lastmap['filename']}' is 0 reactions behind. Thank you ♥")
+

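The new `escherutils` module compares the reaction IDs of a universe group against the reactions already drawn on the latest Escher map, warns about the difference, and writes a reduced JSON model to ease drawing the next map version. The core of the comparison is plain set difference; a toy sketch with made-up reaction IDs:

```python
# made-up IDs: reactions in the model group vs. reactions already on the Escher map
rids_in_group = {'PFK', 'PYK', 'ENO', 'PGK'}
drawn_rids = {'PFK', 'ENO'}   # in the module these come from lastmap['json'][1]['reactions']

remainings = rids_in_group - drawn_rids
print(len(remainings), sorted(remainings))   # -> 2 ['PGK', 'PYK']
```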
@@ -171,9 +171,15 @@ def write_excel_model(model, filepath, nofigs, memote_results_dict, df_E, df_B,
     df_R = df_R[df_R_first_cols + sorted([c for c in df_R.columns if c not in df_R_first_cols])]
     df_T = df_T[df_R_first_cols + sorted([c for c in df_T.columns if c not in df_R_first_cols])]
     df_A = df_A[df_R_first_cols + sorted([c for c in df_A.columns if c not in df_R_first_cols])]
-    df_G_first_cols = ['gid', 'name', 'involved_in']
+    df_G_first_cols = ['gid', 'name', 'involved_in', 'kingdom']
     df_G = df_G[df_G_first_cols + sorted([c for c in df_G.columns if c not in df_G_first_cols])]
 
+    # drop unused columns
+    df_M = df_M.drop(columns=["curator_codes", "curator_notes"])
+    df_R = df_R.drop(columns=["curator_codes", "curator_notes"])
+    df_T = df_T.drop(columns=["curator_codes", "curator_notes"])
+    df_G = df_G.drop(columns=["phylum"])
+
 
 
     with pnd.ExcelWriter(filepath, engine='xlsxwriter') as writer:

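`write_excel_model()` now puts a `kingdom` column among the leading gene columns and strips the internal curator bookkeeping columns before the workbook is written. The drop itself is ordinary pandas; a toy sketch with a one-row frame (column names taken from the diff):

```python
import pandas as pnd

df_M = pnd.DataFrame({'pure_mid': ['glc__D'], 'name': ['D-glucose'],
                      'curator_codes': ['xx'], 'curator_notes': ['checked']})

# same call as in the diff: remove bookkeeping columns before export
df_M = df_M.drop(columns=["curator_codes", "curator_notes"])
print(list(df_M.columns))   # -> ['pure_mid', 'name']
```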
@@ -54,30 +54,30 @@ def show_contributions(logger, db, goodbefore):
 
 
     if goodbefore != [None, None, None]:
-        logger.debug(f"Contributions
+        logger.debug(f"Contributions counter disabled when using --goodbefore.")
         return 0
 
 
     # create a counter for each author
-    cnt = {author: 0 for author in db['
+    cnt = {author: 0 for author in db['curators']['username']}
     cnt_tot = 0
 
 
     for index, row in db['R'].iterrows():
-        if type(row['
-            logger.error(f"Missing
+        if type(row['curator']) != str:
+            logger.error(f"Missing curator in tab 'R', rid '{row['rid']}'.")
             return 1
-        for author in row['
+        for author in row['curator'].split(';'):
             author = author.rstrip().strip()
             cnt[author] += 1
             cnt_tot += 1
 
 
     for index, row in db['T'].iterrows():
-        if type(row['
-            logger.error(f"Missing
+        if type(row['curator']) != str:
+            logger.error(f"Missing curator in tab 'T', rid '{row['rid']}'.")
             return 1
-        for author in row['
+        for author in row['curator'].split(';'):
             author = author.rstrip().strip()
             cnt[author] += 1
             cnt_tot += 1