seed2lp 2.0.0-py3-none-any.whl → 2.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
seed2lp/logger.py CHANGED
@@ -1,13 +1,14 @@
  import logging, logging.config
+ from logging import Logger
  from os import path
  from . import color, file
  import yaml
  from ._version import __version__

- ROJECT_DIR = path.dirname(path.abspath(__file__))
- LOG_DIR:str
- log:logging.Logger
- verbose:bool
+ PROJECT_DIR = path.dirname(path.abspath(__file__))
+ #LOG_DIR:str
+ #log:logging.Logger
+ #verbose:bool

  COLORS = {
  "WARNING": color.yellow,
@@ -36,34 +37,71 @@ class ColoredFormatter(logging.Formatter):



- def __init_logger__(log_path:str):
- """Init logger depending on log_path (and therefor run_mode)
+ # def __init_logger_2__(log_path:str, verbose:bool=False):
+ # """Init logger depending on log_path (and therefor run_mode)
+
+ # Args:
+ # log_path (str): Full path of logger file
+ # """
+ # global log
+ # #logging.config.fileConfig(path.join(ROJECT_DIR,'log_conf.yaml'))
+ # with open(path.join(ROJECT_DIR,'log_conf.yaml'), "rt") as f:
+ # config = yaml.safe_load(f.read())
+ # if verbose:
+ # config['handlers']['console']['level']='DEBUG'
+ # logging.config.dictConfig(config)
+ # formatter = logging.Formatter('%(levelname)s %(asctime)s: %(message)s')
+ # file_handler = logging.FileHandler(log_path)
+
+ # if verbose:
+ # file_handler.setLevel(logging.DEBUG)
+ # else:
+ # file_handler.setLevel(logging.INFO)
+ # file_handler.setFormatter(formatter)
+
+ # # create logger
+ # log = logging.getLogger('s2lp')
+ # log.addHandler(file_handler)
+
+
+ def init_logger(log_path: str, verbose: bool = False):
+ """Init logger depending on log_path (and therefor run_mode). Return a logger
+ needed for multiprocessing.

  Args:
  log_path (str): Full path of logger file
+ verbose (bool, optional): Uses verbose mode. Defaults to False.
+
+ Returns:
+ logging.Logger: logger
  """
- global log
- #logging.config.fileConfig(path.join(ROJECT_DIR,'log_conf.yaml'))
- with open(path.join(ROJECT_DIR,'log_conf.yaml'), "rt") as f:
- config = yaml.safe_load(f.read())
+ logger = logging.getLogger("s2lp")
+
+ if getattr(logger, "_configured", False):
+ return logger
+
+ # Console (YAML)
+ with open(path.join(PROJECT_DIR, "log_conf.yaml"), "rt") as f:
+ config = yaml.safe_load(f)
  if verbose:
- config['handlers']['console']['level']='DEBUG'
+ config["handlers"]["console"]["level"] = "DEBUG"
  logging.config.dictConfig(config)
- formatter = logging.Formatter('%(levelname)s %(asctime)s: %(message)s')
- file_handler = logging.FileHandler(log_path)

- if verbose:
- file_handler.setLevel(logging.DEBUG)
- else:
- file_handler.setLevel(logging.INFO)
+ # Fichier (par process)
+ file_handler = logging.FileHandler(log_path)
+ file_handler.setLevel(logging.DEBUG if verbose else logging.INFO)
+ formatter = logging.Formatter(
+ "%(levelname)s %(asctime)s [%(processName)s]: %(message)s"
+ )
  file_handler.setFormatter(formatter)

- # create logger
- log = logging.getLogger('s2lp')
- log.addHandler(file_handler)
+ logger.addHandler(file_handler)
+
+ logger._configured = True
+ return logger


- def print_log(message:str, level:str, col=None):
+ def print_log(logger: logging.Logger, message:str, level:str, col=None, verbose:bool=False):
  """Print and log messages

  Args:
@@ -81,32 +119,38 @@ def print_log(message:str, level:str, col=None):
  match level:
  case "info":
  if col:
- log.info(message, extra={"color": col})
+ logger.info(message, extra={"color": col})
  else:
- log.info(message)
+ logger.info(message)
  case "debug":
- log.debug(message)
+ logger.debug(message)
  case "warning":
- log.warning(message)
+ logger.warning(message)
  case "error":
- log.error(message)
+ logger.error(message)



- def get_logger(sbml_file:str, short_option:str, debug:bool=False):
- global log
- global verbose
- verbose=debug
- net_name = f'{path.splitext(path.basename(sbml_file))[0]}'
- filename = f'{net_name}_{short_option}.log'
- log_path = path.join(LOG_DIR, filename)
+ # def get_logger(sbml_file:str, short_option:str, verbose:bool=False) -> logging.Logger:
+ # net_name = f'{path.splitext(path.basename(sbml_file))[0]}'
+ # filename = f'{net_name}_{short_option}.log'
+ # log_path = path.join(LOG_DIR, filename)
+ # if file.existing_file(log_path):
+ # file.delete(log_path)
+ # logger=__init_logger__(log_path, verbose)
+ # logger.info(f"Seed2LP version: {__version__}")
+ # return logger
+
+ def get_logger(log_dir:str, sbml_file: str, short_option: str, verbose: bool = False):
+ net_name = path.splitext(path.basename(sbml_file))[0]
+ filename = f"{net_name}_{short_option}.log"
+ log_path = path.join(log_dir, filename)
+
  if file.existing_file(log_path):
  file.delete(log_path)
- __init_logger__(log_path)
- log.info(f"Seed2LP version: {__version__}")
+
+ logger = init_logger(log_path, verbose)
+ logger.info("Seed2LP version: %s", __version__)
+ return logger, log_path


- def set_log_dir(value):
- global LOG_DIR
- LOG_DIR = value
-
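
Taken together, the logger changes replace the module-level `log`, `LOG_DIR` and `verbose` globals with an explicit `init_logger`/`get_logger` pair: `init_logger` configures the shared "s2lp" logger once (guarded by the `_configured` flag) and attaches a per-process file handler, while `get_logger` now takes the log directory as a parameter and returns both the logger and the log file path. A minimal caller sketch, with purely illustrative paths, file names and short option:

    # Illustrative only: the log directory, SBML file name and "fba" option are made up.
    import os
    from seed2lp.logger import get_logger, print_log

    log_dir = "/tmp/seed2lp_logs"        # previously set via the removed set_log_dir()
    os.makedirs(log_dir, exist_ok=True)  # logging.FileHandler needs an existing directory

    logger, log_path = get_logger(log_dir, "toy_model.sbml", "fba", verbose=True)
    print_log(logger, "Starting seed search ...", "info")
    logger.debug("Detailed log written to %s", log_path)

Because `init_logger` marks the logger as configured, repeated calls inside the same interpreter return the already-configured logger and do not stack additional file handlers; each worker process configures its own copy, which is what the docstring's "needed for multiprocessing" refers to.
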
seed2lp/network.py CHANGED
@@ -13,7 +13,7 @@
  # - facts (str): Conversion sbml into asp facts
  # - fluxes (list): List of flux check on all set of seeds

- import os
+ import os, logging
  import pandas as pd
  from .reaction import Reaction
  import seed2lp.sbml as SBML
@@ -22,7 +22,7 @@ from . import flux
  from .resmod import Resmod
  from time import time
  from . import color
- from . import logger
+ from .logger import print_log
  from .file import existant_path
  import xml.etree.ElementTree as ET
  import copy
@@ -68,7 +68,8 @@ class NET_TITLE:
  ###################################################################
  class NetBase:
  def __init__(self, targets_as_seeds:bool=False, use_topological_injections:bool=False,
- keep_import_reactions:bool=True, accumulation:bool=False, equality_flux:bool=False):
+ keep_import_reactions:bool=True, accumulation:bool=False, equality_flux:bool=False,
+ verbose:bool=False):
  """Initialize Object NetBase

  Args:
@@ -77,6 +78,8 @@ class NetBase:
  keep_import_reactions (bool): Import reactions are not removed
  accumulation (bool, optional): Is accumulation authorized. Defaults to False.
  """
+ self.logger = logging.getLogger("s2lp")
+
  self.targets_as_seeds = targets_as_seeds
  self.use_topological_injections = use_topological_injections
  self.keep_import_reactions = keep_import_reactions
@@ -122,6 +125,7 @@ class NetBase:
  self.used_meta = dict()

  self.equality_flux=equality_flux
+ self.verbose=verbose

  ######################## GETTER ########################
  def _get_reactions(self):
@@ -154,7 +158,7 @@ class NetBase:
  def get_objective_reactant(self, ojective_name:str, species:str):
  """Get the objective reactants from SBML file
  """
- logger.log.info("Finding list of reactants from opbjective reaction...")
+ self.logger.info("Finding list of reactants from opbjective reaction...")
  reactants = SBML.get_listOfReactants_from_name(self.model[species], ojective_name)
  for reactant in reactants:
  react_name = prefixed_name = reactant.attrib.get('species')
@@ -164,7 +168,7 @@
  self.targets[react_name] = [prefixed_name]
  else:
  self.targets[react_name].append(prefixed_name)
- logger.log.info("... DONE")
+ self.logger.info("... DONE")


  def get_boundaries(self, reaction, species:str):
@@ -223,7 +227,7 @@ class NetBase:
  self.is_objective_error = True
  raise ValueError(f"No objective reaction found or none has coefficient 1\n")
  else:
- logger.print_log(f'Objective found for {species}: {color.bold}{obj_found_name}{color.reset}', "info")
+ print_log(self.logger, f'Objective found for {species}: {color.bold}{obj_found_name}{color.reset}', "info", verbose=self.verbose)
  if lbound == 0 and ubound == 0:
  self.is_objective_error = True
  raise ValueError(f"Lower and upper boundaries are [0,0] \nfor objetive reaction {obj_found_name}\n")
@@ -390,31 +394,6 @@ class NetBase:
  self._set_reactions(reactions_list)


- # def prefix_id_network(self, name:str, species:str="", type_element:str=""):
- # """Prefix Reaction or Metbolite by the network name (filename) if the tool is used for community.
- # For single network, nothing is prefixed.
-
- # Args:
- # name (str): ID of the element
- # species (str, optional): Network name (from filename). Defaults to "".
- # type_element: (str, optional): "reaction" or "metabolite" or no type. Defaults to "".
-
- # Returns:
- # str: The name prfixed by the network if needed
- # """
- # match self.is_community, type_element:
- # case True,"reaction":
- # return sub("^R_", f"R_{species}_",name)
- # case True,"metabolite":
- # return sub("^M_", f"M_{species}_",name)
- # case True,"metaid":
- # return sub("^meta_R_", f"meta_R_{species}_",name)
- # case True,_:
- # return f"{species}_{name}"
- # case _,_:
- # return name
-
-
  def get_network(self, species:str, to_print:bool=True,
  write_sbml:bool=False):
  """Get the description of the Network from SBML file
@@ -585,12 +564,12 @@ class NetBase:
  if warning_message :
  if not self.is_community or ( self.is_community and warning_message != species):
  warning_message += "\n"
- logger.log.warning(warning_message)
+ self.logger.warning(warning_message)
  if info_message:
- logger.log.info(info_message)
+ self.logger.info(info_message)
  else:
- logger.log.info(warning_message)
- logger.log.info(info_message)
+ self.logger.info(warning_message)
+ self.logger.info(info_message)
  print("____________________________________________\n")


@@ -632,7 +611,7 @@ class NetBase:
  def convert_to_facts(self):
  """Convert the corrected Network into ASP facts
  """
- logger.log.info("Converting Network into ASP facts ...")
+ self.logger.info("Converting Network into ASP facts ...")
  facts = ""
  # Upper bound does not change on forward reaction

@@ -654,7 +633,7 @@ class NetBase:
  facts += f'\np_seed({quoted(possible)}).'

  self.facts = facts
- logger.log.info("... DONE")
+ self.logger.info("... DONE")


  def simplify(self):
@@ -832,11 +811,12 @@ class NetBase:
  'has_flux_demands', 'timer'])
  fluxes = fluxes.astype(dtypes)

+
  if self.objectives_reaction_name:
  if self.result_seeds:
- logger.log.info("Check fluxes Starting")
+ self.logger.info("Check fluxes Starting")
  model = flux.get_model(self.file)
- fluxes_init = flux.get_init(model, self.objectives_reaction_name)
+ fluxes_init = flux.get_init(model, self.objectives_reaction_name, self.logger)
  if not self.keep_import_reactions:
  fluxes_no_import = flux.stop_flux(model, self.objectives_reaction_name)
  self.model[self.name] = model
@@ -846,7 +826,7 @@
  print(color.purple+"____________________________________________")
  print("____________________________________________\n"+color.reset)

- logger.log.warning("Processing in parallel. " \
+ self.logger.warning("Processing in parallel. " \
  "\nNo outputs will be shown. " \
  "\nPlease wait ...\n")

@@ -889,7 +869,7 @@
  # Check if the search mode has changed
  if prev_search_mode == None or result.search_mode != prev_search_mode:
  if has_warning:
- logger.log.warning(WARNING_MESSAGE_LP_COBRA)
+ self.logger.warning(WARNING_MESSAGE_LP_COBRA)
  prev_search_mode = search_mode
  prev_solver_type = None
  if prev_solver_type != solver_type:
@@ -901,14 +881,14 @@
  fluxes = pd.concat([fluxes, result_flux], ignore_index=True)

  if has_warning:
- logger.log.warning(WARNING_MESSAGE_LP_COBRA)
+ self.logger.warning(WARNING_MESSAGE_LP_COBRA)
  else:
  for result in self.result_seeds :

  if prev_search_mode == None or result.search_mode != prev_search_mode:
  if has_warning:
  print("\n")
- logger.log.warning(WARNING_MESSAGE_LP_COBRA)
+ self.logger.warning(WARNING_MESSAGE_LP_COBRA)
  print(color.yellow+"\n____________________________________________")
  print("____________________________________________\n"+color.reset)
  print(result.search_mode.center(44))
@@ -954,7 +934,7 @@

  if has_warning:
  print("\n")
- logger.log.warning(WARNING_MESSAGE_LP_COBRA)
+ self.logger.warning(WARNING_MESSAGE_LP_COBRA)
  print(color.yellow+"\n____________________________________________\n"+color.reset)

  else:
@@ -970,7 +950,7 @@
  Args:
  data (dict): Json data from previous seed2lp result file
  """
- logger.log.info("Converting data from result file ...")
+ self.logger.info("Converting data from result file ...")
  reaction_option = data["OPTIONS"]["REACTION"]
  match reaction_option:
  case "Remove Import Reaction":
@@ -988,7 +968,13 @@
  else:
  self.accumulation = False

- self.objectives_reaction_name = data["NETWORK"]["OBJECTIVE"]
+
+ if self.is_community:
+ self.objectives_reaction_name = [item[1] for item in data["NETWORK"]["OBJECTIVE"]]
+
+ else:
+ self.objectives_reaction_name = [data["NETWORK"]["OBJECTIVE"][0][1]]
+

  if data["NETWORK"]["SEARCH_MODE"] in NET_TITLE.CONVERT_TITLE_MODE:
  self.run_mode = NET_TITLE.CONVERT_TITLE_MODE[data["NETWORK"]["SEARCH_MODE"]]
@@ -1037,7 +1023,7 @@
  transferred_list = None
  self.add_result_seeds(solver_type_transmetted, search_info, name, size, seeds_list,
  obj_flux_lp, transferred_list=transferred_list)
- logger.log.info("... DONE")
+ self.logger.info("... DONE")
  return maximize, solve


@@ -1056,26 +1042,26 @@
  print(f"TARGETS".center(44))
  print(f"FOR TARGET MODE AND FBA".center(44))
  print("____________________________________________\n")
- logger.print_log(tgt_message, "info")
+ print_log(self.logger, tgt_message, "info", verbose=self.verbose)

  print("\n____________________________________________\n")
  print(f"OBJECTVE".center(44))
  print(f"FOR HYBRID".center(44))
  print("____________________________________________\n")
- logger.print_log(obj_message, "info")
+ print_log(self.logger, obj_message, "info", verbose=self.verbose)
  print("\n")


  print("\n____________________________________________\n")
  print(f"NETWORK".center(44))
  print("____________________________________________\n")
- logger.print_log(net_mess[0], "info")
+ print_log(self.logger, net_mess[0], "info", verbose=self.verbose)
  if self.keep_import_reactions:
- logger.print_log(net_mess[1], "info")
+ print_log(self.logger, net_mess[1], "info", verbose=self.verbose)
  if self.run_mode != "full":
- logger.print_log(net_mess[2], "info")
+ print_log(self.logger, net_mess[2], "info", verbose=self.verbose)
  if self.run_mode != "fba":
- logger.print_log(net_mess[3], "info")
+ print_log(self.logger, net_mess[3], "info", verbose=self.verbose)
  print("\n")


@@ -1090,7 +1076,7 @@
  bool: Return if the objective reaction has flux (True) or not (False)
  """
  model = flux.get_model(self.file)
- flux.get_init(model, self.objectives_reaction_name, False)
+ flux.get_init(model, self.objectives_reaction_name, self.logger, False)
  flux.stop_flux(model, self.objectives_reaction_name, False)

  result = Resmod(None, self.objectives_reaction_name,
@@ -1342,7 +1328,7 @@
  class Network(NetBase):
  def __init__(self, file:str, run_mode:str=None, targets_as_seeds:bool=False, use_topological_injections:bool=False,
  keep_import_reactions:bool=True, input_dict:dict=None, accumulation:bool=False, to_print:bool=True,
- write_sbml:bool=False):
+ write_sbml:bool=False, verbose:bool=False):
  """Initialize Object Network

  Args:
@@ -1358,7 +1344,7 @@ class Network(NetBase):
  """

  super().__init__(targets_as_seeds, use_topological_injections,
- keep_import_reactions, accumulation)
+ keep_import_reactions, accumulation, verbose=verbose)
  self.file = file
  self.run_mode = run_mode
  self.file_extension = ""
@@ -1373,11 +1359,12 @@ class Network(NetBase):

  # Instatiate objectives from target file if given by user
  is_user_objective = self.check_objectives(input_dict)
+
  # Find objectives on sbml file is not given
  is_reactant_found=False
  is_objective_error=False

- logger.print_log("\nFinding objective ...", "info")
+ print_log(self.logger, "\nFinding objective ...", "info")
  if self.objectives is None or not self.objectives:
  try:
  is_reactant_found = self.find_objectives(input_dict, self.name)
@@ -1385,19 +1372,19 @@
  # self.objectives_reaction_name.append(obj[1])
  except ValueError as e:
  is_objective_error = True
- logger.log.error(str(e))
+ self.logger.error(str(e))
  # Init networks with data given by user and objective reaction
  # write messages
  if self.run_mode is not None:
  # write console messages
  self.init_with_inputs(input_dict, is_reactant_found, is_objective_error, is_user_objective)

- logger.print_log("Network normalisation in progress...", "info")
- logger.print_log("Can take several minutes", "info")
+ print_log(self.logger, "Network normalisation in progress...", "info", verbose=self.verbose)
+ print_log(self.logger, "Can take several minutes", "info", verbose=self.verbose)
  normalisation_time = time()
  self.get_network(self.name, to_print, write_sbml)
  normalisation_time = time() - normalisation_time
- logger.print_log(f"Normalisation total time: {round(normalisation_time, 3)}s", "info")
+ print_log(self.logger, f"Normalisation total time: {round(normalisation_time, 3)}s", "info", verbose=self.verbose)



@@ -1410,7 +1397,7 @@ class Netcom(NetBase):
  def __init__(self, comfile:str, sbmldir:str, temp_dir:str, run_mode:str=None, run_solve:str=None, community_mode:str=None,
  targets_as_seeds:bool=False, use_topological_injections:bool=False, keep_import_reactions:bool=True,
  input_dict:dict=None, accumulation:bool=False, to_print:bool=True,
- write_sbml:bool=False, equality_flux:bool=False):
+ write_sbml:bool=False, equality_flux:bool=False, verbose:bool=False):
  """Initialise Object Netcom

  Args:
@@ -1427,7 +1414,7 @@
  write_sbml (bool, optional): Is a writing SBML file mode or not. Defaults to False.
  """
  super().__init__(targets_as_seeds, use_topological_injections,
- keep_import_reactions, accumulation, equality_flux)
+ keep_import_reactions, accumulation, equality_flux, verbose=verbose)
  self.name=""
  self.comfile = comfile
  self.sbml_dir = sbmldir
@@ -1458,14 +1445,14 @@
  # Find objectives on sbml file is not given
  is_reactant_found=False
  is_objective_error=False
- logger.print_log("\n Finding objectives of community ...", "info")
+ print_log(self.logger, "\n Finding objectives of community ...", "info", verbose=self.verbose)
  if self.objectives is None or not self.objectives:
  for species in self.species:
  try:
  is_reactant_found = self.find_objectives(input_dict, species)
  except ValueError as e:
  is_objective_error = True
- logger.log.error(str(e))
+ self.logger.error(str(e))

  # Init networks with data given by user and objective reaction
  # write messages
@@ -1473,14 +1460,14 @@
  # write console messages
  self.init_with_inputs(input_dict, is_reactant_found, is_objective_error, is_user_objective)

- logger.print_log("Network normalisation in progress ...", "info")
+ print_log(self.logger, "Network normalisation in progress ...", "info", verbose=self.verbose)
  normalisation_time = time()
  for species in self.species:
  self.get_network(species, to_print, write_sbml)

  self.write_merge_sbml_file()
  normalisation_time = time() - normalisation_time
- logger.print_log(f"Normalisation total time: {round(normalisation_time, 3)}s", "info")
+ print_log(self.logger, f"Normalisation total time: {round(normalisation_time, 3)}s", "info", verbose=self.verbose)



@@ -1518,7 +1505,7 @@ class Netcom(NetBase):
  existant_path(sbml_file)
  self.files.append(sbml_file)
  except FileNotFoundError as e :
- logger.log.error(str(e))
+ self.logger.error(str(e))
  exit(1)
  self.sbml[species], self.sbml_first_line, self.default_namespace = SBML.get_root(sbml_file)
  self.fbc[species] = SBML.get_fbc(self.sbml[species])
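
The network-side changes follow the same pattern: instead of the `logger.log` module globals, `NetBase.__init__` now fetches the shared logger with `logging.getLogger("s2lp")` and stores it on the instance, and the free function `print_log` receives that logger explicitly. This works because `logging.getLogger(name)` always returns the same logger object for a given name, so any class in the same process picks up whatever handlers `init_logger` attached, without module-level state. A small standalone illustration (not package code):

    import logging

    class Demo:
        def __init__(self):
            # Same logger object as the one configured by seed2lp.logger.init_logger,
            # provided both run in the same process.
            self.logger = logging.getLogger("s2lp")

        def run(self):
            self.logger.info("handlers are shared through the logging registry")

    Demo().run()  # emits nothing unless the "s2lp" logger has been configured
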
seed2lp/reaction.py CHANGED
@@ -6,8 +6,9 @@
  # - Reactants (list): List of reactants (object Metabolite)
  # - Products (list): List of list of products (object Metabolite)

+ import logging
  from seed2lp.metabolite import Metabolite
- from . import logger
+ #from . import logger

  class Reaction:
  def __init__(self, name:str, reversible:bool=False, lbound:float=None, ubound:float=None,
@@ -32,6 +33,7 @@ class Reaction:
  self.is_reversible_modified = False
  self.species = species
  self.has_rm_prefix = False
+ self.logger = logging.getLogger("s2lp")


  ######################## SETTER ########################
@@ -186,7 +188,7 @@ class Reaction:
  # a trace of bondaries into asp fact, the rm_ prefix is needed for the reverse of the remaning reactions
  if not keep_import_reactions and (is_import_reaction or (self.has_rm_prefix and is_reversed)):
  prefix = "rm_"
- logger.log.info(f"Reaction {self.name} artificially removed into lp facts with a prefix 'rm_'")
+ self.logger.info(f"Reaction {self.name} artificially removed into lp facts with a prefix 'rm_'")
  facts += f'{prefix}reaction("{name}").\n'
  facts += f'{prefix}bounds("{name}","{"{:.10f}".format(lbound)}","{"{:.10f}".format(ubound)}").\n'
192
194
  facts += f'{prefix}bounds("{name}","{"{:.10f}".format(lbound)}","{"{:.10f}".format(ubound)}").\n'