risk-network 0.0.7b9__tar.gz → 0.0.7b11__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/PKG-INFO +1 -1
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/__init__.py +1 -1
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/annotations/io.py +4 -4
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/log/__init__.py +1 -1
- risk_network-0.0.7b11/risk/log/config.py +48 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/log/params.py +4 -4
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/neighborhoods/domains.py +6 -3
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/neighborhoods/neighborhoods.py +3 -2
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/network/io.py +22 -26
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/risk.py +42 -26
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/stats/permutation/test_functions.py +2 -2
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk_network.egg-info/PKG-INFO +1 -1
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk_network.egg-info/SOURCES.txt +1 -1
- risk_network-0.0.7b9/risk/log/console.py +0 -16
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/LICENSE +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/MANIFEST.in +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/README.md +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/pyproject.toml +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/annotations/__init__.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/annotations/annotations.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/constants.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/neighborhoods/__init__.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/neighborhoods/community.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/network/__init__.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/network/geometry.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/network/graph.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/network/plot.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/stats/__init__.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/stats/hypergeom.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/stats/permutation/__init__.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/stats/permutation/permutation.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/stats/poisson.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk/stats/stats.py +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk_network.egg-info/dependency_links.txt +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk_network.egg-info/requires.txt +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/risk_network.egg-info/top_level.txt +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/setup.cfg +0 -0
- {risk_network-0.0.7b9 → risk_network-0.0.7b11}/setup.py +0 -0
risk/annotations/io.py

@@ -12,7 +12,7 @@ import networkx as nx
 import pandas as pd
 
 from risk.annotations.annotations import load_annotations
-from risk.log import params,
+from risk.log import params, logger, log_header
 
 
 class AnnotationsIO:

@@ -218,7 +218,7 @@ def _log_loading(filetype: str, filepath: str = "") -> None:
         filetype (str): The type of the file being loaded (e.g., 'Cytoscape').
         filepath (str, optional): The path to the file being loaded.
     """
-    [2 removed lines; content not shown in this rendering]
+    log_header("Loading annotations")
+    logger.info(f"Filetype: {filetype}")
     if filepath:
-    [1 removed line; content not shown in this rendering]
+        logger.info(f"Filepath: {filepath}")
risk/log/config.py (new file)

@@ -0,0 +1,48 @@
+"""
+risk/log/config
+~~~~~~~~~~~~~~~
+"""
+
+import logging
+
+# Create and configure the global logger
+logger = logging.getLogger("risk_logger")
+logger.setLevel(logging.INFO)
+# Create and configure the console handler
+console_handler = logging.StreamHandler()
+console_handler.setLevel(logging.INFO)
+# Set the output format for the logger
+formatter = logging.Formatter("%(message)s")
+console_handler.setFormatter(formatter)
+# Add the console handler to the logger if not already attached
+if not logger.hasHandlers():
+    logger.addHandler(console_handler)
+
+
+def set_global_verbosity(verbose):
+    """Set the global verbosity level for the logger.
+
+    Args:
+        verbose (bool): Whether to display all log messages (True) or only error messages (False).
+
+    Returns:
+        None
+    """
+    if verbose:
+        logger.setLevel(logging.INFO)  # Show all messages
+        console_handler.setLevel(logging.INFO)
+    else:
+        logger.setLevel(logging.ERROR)  # Show only error messages
+        console_handler.setLevel(logging.ERROR)
+
+
+def log_header(input_string: str) -> None:
+    """Log the input string as a header with a line of dashes above and below it.
+
+    Args:
+        input_string (str): The string to be printed as a header.
+    """
+    border = "-" * len(input_string)
+    logger.info(border)
+    logger.info(input_string)
+    logger.info(border)
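This new module replaces the print-based console helper with a shared `logging.Logger`. The sketch below is illustrative only and is not taken from the package documentation; it assumes the updated `risk/log/__init__.py` re-exports `logger`, `log_header`, and `set_global_verbosity`, which is consistent with the imports added elsewhere in this diff.

```python
# Illustrative sketch, not from the package docs. Assumes risk.log re-exports
# the names defined in risk/log/config.py, as the imports in this diff suggest.
from risk.log import logger, log_header, set_global_verbosity

set_global_verbosity(True)          # INFO and above reach the console
log_header("Loading annotations")   # title framed by dashed lines above and below
logger.info("Filetype: JSON")       # rendered as the bare message ("%(message)s")

set_global_verbosity(False)         # from here on, only ERROR and above are emitted
logger.info("This message is suppressed")
logger.error("This message is still shown")
```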
risk/log/params.py

@@ -12,7 +12,7 @@ from typing import Any, Dict
 
 import numpy as np
 
-from .
+from .config import logger, log_header
 
 # Suppress all warnings - this is to resolve warnings from multiprocessing
 warnings.filterwarnings("ignore")

@@ -35,11 +35,11 @@ def _safe_param_export(func):
             filepath = (
                 kwargs.get("filepath") or args[1]
             )  # Assuming filepath is always the second argument
-            [1 removed line; content not shown in this rendering]
+            logger.info(f"Parameters successfully exported to filepath: {filepath}")
             return result
         except Exception as e:
             filepath = kwargs.get("filepath") or args[1]
-            [1 removed line; content not shown in this rendering]
+            logger.error(f"An error occurred while exporting parameters to {filepath}: {e}")
             return None
 
     return wrapper

@@ -161,7 +161,7 @@ class Params:
        Returns:
            dict: A dictionary containing the processed parameters.
        """
-        [1 removed line; content not shown in this rendering]
+        log_header("Loading parameters")
        return _convert_ndarray_to_list(
            {
                "annotations": self.annotations,
risk/neighborhoods/domains.py

@@ -15,6 +15,7 @@ from sklearn.metrics import silhouette_score
 
 from risk.annotations import get_description
 from risk.constants import GROUP_LINKAGE_METHODS, GROUP_DISTANCE_METRICS
+from risk.log import logger
 
 
 def define_domains(

@@ -45,10 +46,10 @@ def define_domains(
     )
     # Perform hierarchical clustering
     Z = linkage(m, method=best_linkage, metric=best_metric)
-    [1 removed line; content not shown in this rendering]
+    logger.info(
         f"Linkage criterion: '{linkage_criterion}'\nLinkage method: '{best_linkage}'\nLinkage metric: '{best_metric}'"
     )
-    [1 removed line; content not shown in this rendering]
+    logger.info(f"Optimal linkage threshold: {round(best_threshold, 3)}")
     # Calculate the optimal threshold for clustering
     max_d_optimal = np.max(Z[:, 2]) * best_threshold
     # Assign domains to the annotations matrix

@@ -58,7 +59,9 @@ def define_domains(
     except ValueError:
         # If a ValueError is encountered, handle it by assigning unique domains
         n_rows = len(top_annotations)
-        [1 removed line; content not shown in this rendering]
+        logger.error(
+            f"Error encountered. Skipping clustering and assigning {n_rows} unique domains."
+        )
         top_annotations["domain"] = range(1, n_rows + 1)  # Assign unique domains
 
     # Create DataFrames to store domain information
risk/neighborhoods/neighborhoods.py

@@ -20,6 +20,7 @@ from risk.neighborhoods.community import (
     calculate_spinglass_neighborhoods,
     calculate_walktrap_neighborhoods,
 )
+from risk.log import logger
 
 # Suppress DataConversionWarning
 warnings.filterwarnings(action="ignore", category=DataConversionWarning)

@@ -129,7 +130,7 @@ def process_neighborhoods(
     enrichment_matrix = neighborhoods["enrichment_matrix"]
     binary_enrichment_matrix = neighborhoods["binary_enrichment_matrix"]
     significant_enrichment_matrix = neighborhoods["significant_enrichment_matrix"]
-    [1 removed line; content not shown in this rendering]
+    logger.info(f"Imputation depth: {impute_depth}")
     if impute_depth:
         (
             enrichment_matrix,

@@ -142,7 +143,7 @@ def process_neighborhoods(
             max_depth=impute_depth,
         )
 
-    [1 removed line; content not shown in this rendering]
+    logger.info(f"Pruning threshold: {prune_threshold}")
     if prune_threshold:
         (
             enrichment_matrix,
risk/network/io.py

@@ -16,7 +16,7 @@ import networkx as nx
 import pandas as pd
 
 from risk.network.geometry import assign_edge_lengths
-from risk.log import params,
+from risk.log import params, logger, log_header
 
 
 class NetworkIO:

@@ -57,9 +57,8 @@ class NetworkIO:
            weight_label=weight_label,
        )
 
-    @
+    @staticmethod
    def load_gpickle_network(
-        cls,
        filepath: str,
        compute_sphere: bool = True,
        surface_depth: float = 0.0,

@@ -80,7 +79,7 @@ class NetworkIO:
        Returns:
            nx.Graph: Loaded and processed network.
        """
-        networkio =
+        networkio = NetworkIO(
            compute_sphere=compute_sphere,
            surface_depth=surface_depth,
            min_edges_per_node=min_edges_per_node,

@@ -109,9 +108,8 @@ class NetworkIO:
        # Initialize the graph
        return self._initialize_graph(G)
 
-    @
+    @staticmethod
    def load_networkx_network(
-        cls,
        network: nx.Graph,
        compute_sphere: bool = True,
        surface_depth: float = 0.0,

@@ -132,7 +130,7 @@ class NetworkIO:
        Returns:
            nx.Graph: Loaded and processed network.
        """
-        networkio =
+        networkio = NetworkIO(
            compute_sphere=compute_sphere,
            surface_depth=surface_depth,
            min_edges_per_node=min_edges_per_node,

@@ -158,9 +156,8 @@ class NetworkIO:
        # Initialize the graph
        return self._initialize_graph(network)
 
-    @
+    @staticmethod
    def load_cytoscape_network(
-        cls,
        filepath: str,
        source_label: str = "source",
        target_label: str = "target",

@@ -187,7 +184,7 @@ class NetworkIO:
        Returns:
            nx.Graph: Loaded and processed network.
        """
-        networkio =
+        networkio = NetworkIO(
            compute_sphere=compute_sphere,
            surface_depth=surface_depth,
            min_edges_per_node=min_edges_per_node,

@@ -312,9 +309,8 @@ class NetworkIO:
        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)
 
-    @
+    @staticmethod
    def load_cytoscape_json_network(
-        cls,
        filepath: str,
        source_label: str = "source",
        target_label: str = "target",

@@ -339,7 +335,7 @@ class NetworkIO:
        Returns:
            NetworkX graph: Loaded and processed network.
        """
-        networkio =
+        networkio = NetworkIO(
            compute_sphere=compute_sphere,
            surface_depth=surface_depth,
            min_edges_per_node=min_edges_per_node,

@@ -455,10 +451,10 @@ class NetworkIO:
        # Log the number of nodes and edges before and after cleaning
        num_final_nodes = G.number_of_nodes()
        num_final_edges = G.number_of_edges()
-        [4 removed lines; content not shown in this rendering]
+        logger.info(f"Initial node count: {num_initial_nodes}")
+        logger.info(f"Final node count: {num_final_nodes}")
+        logger.info(f"Initial edge count: {num_initial_edges}")
+        logger.info(f"Final edge count: {num_final_edges}")
 
    def _assign_edge_weights(self, G: nx.Graph) -> None:
        """Assign weights to the edges in the graph.

@@ -476,7 +472,7 @@ class NetworkIO:
            )  # Default to 1.0 if 'weight' not present
 
        if self.include_edge_weight and missing_weights:
-            [1 removed line; content not shown in this rendering]
+            logger.info(f"Total edges missing weights: {missing_weights}")
 
    def _validate_nodes(self, G: nx.Graph) -> None:
        """Validate the graph structure and attributes.

@@ -514,14 +510,14 @@ class NetworkIO:
            filetype (str): The type of the file being loaded (e.g., 'CSV', 'JSON').
            filepath (str, optional): The path to the file being loaded. Defaults to "".
        """
-        [2 removed lines; content not shown in this rendering]
+        log_header("Loading network")
+        logger.info(f"Filetype: {filetype}")
        if filepath:
-        [2 removed lines; content not shown in this rendering]
+            logger.info(f"Filepath: {filepath}")
+        logger.info(f"Edge weight: {'Included' if self.include_edge_weight else 'Excluded'}")
        if self.include_edge_weight:
-        [3 removed lines; content not shown in this rendering]
+            logger.info(f"Weight label: {self.weight_label}")
+        logger.info(f"Minimum edges per node: {self.min_edges_per_node}")
+        logger.info(f"Projection: {'Sphere' if self.compute_sphere else 'Plane'}")
        if self.compute_sphere:
-        [1 removed line; content not shown in this rendering]
+            logger.info(f"Surface depth: {self.surface_depth}")
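Across `NetworkIO`, the loader entry points drop their `cls` parameter and are redecorated as `@staticmethod`, constructing a `NetworkIO` instance internally (`networkio = NetworkIO(...)`). The following calling sketch is illustrative only and uses just the parameters visible in the signatures above.

```python
# Sketch only: parameter names beyond those shown in this diff are not assumed.
# "network.gpickle" is a placeholder path, not a file shipped with the package.
import networkx as nx
from risk.network.io import NetworkIO

graph: nx.Graph = NetworkIO.load_gpickle_network(
    "network.gpickle",      # filepath: str
    compute_sphere=True,    # default per the signature shown above
    surface_depth=0.0,
)
```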
risk/risk.py

@@ -10,7 +10,7 @@ import numpy as np
 import pandas as pd
 
 from risk.annotations import AnnotationsIO, define_top_annotations
-from risk.log import params,
+from risk.log import params, logger, log_header, set_global_verbosity
 from risk.neighborhoods import (
     define_domains,
     get_network_neighborhoods,

@@ -33,16 +33,32 @@ class RISK(NetworkIO, AnnotationsIO):
    and performing network-based statistical analysis, such as neighborhood significance testing.
    """
 
-    def __init__(self, *args, **kwargs):
-        """Initialize the RISK class with configuration settings.
+    def __init__(self, *args, verbose: bool = True, **kwargs):
+        """Initialize the RISK class with configuration settings.
+
+        Args:
+            verbose (bool): If False, suppresses all log messages to the console. Defaults to True.
+            *args: Variable length argument list.
+            **kwargs: Arbitrary keyword arguments.
+
+        Note:
+            - All *args and **kwargs are passed to NetworkIO's __init__ method.
+            - AnnotationsIO does not take any arguments and is initialized without them.
+        """
+        # Set global verbosity for logging
+        set_global_verbosity(verbose)
        # Initialize and log network parameters
        params.initialize()
-        #
+        # Use super() to call NetworkIO's __init__ with the given arguments and keyword arguments
        super().__init__(*args, **kwargs)
 
        @property
-    def params(self):
-        """Access the logged parameters.
+    def params(self) -> params:
+        """Access the logged network parameters.
+
+        Returns:
+            Params: An instance of the Params class with logged parameters and methods to access or update them.
+        """
        return params
 
    def load_neighborhoods_by_hypergeom(

@@ -69,7 +85,7 @@ class RISK(NetworkIO, AnnotationsIO):
        Returns:
            dict: Computed significance of neighborhoods.
        """
-        [1 removed line; content not shown in this rendering]
+        log_header("Running hypergeometric test")
        # Log neighborhood analysis parameters
        params.log_neighborhoods(
            distance_metric=distance_metric,

@@ -122,7 +138,7 @@ class RISK(NetworkIO, AnnotationsIO):
        Returns:
            dict: Computed significance of neighborhoods.
        """
-        [1 removed line; content not shown in this rendering]
+        log_header("Running Poisson test")
        # Log neighborhood analysis parameters
        params.log_neighborhoods(
            distance_metric=distance_metric,

@@ -181,7 +197,7 @@ class RISK(NetworkIO, AnnotationsIO):
        Returns:
            dict: Computed significance of neighborhoods.
        """
-        [1 removed line; content not shown in this rendering]
+        log_header("Running permutation test")
        # Log neighborhood analysis parameters
        params.log_neighborhoods(
            distance_metric=distance_metric,

@@ -205,10 +221,10 @@ class RISK(NetworkIO, AnnotationsIO):
        )
 
        # Log and display permutation test settings
-        [4 removed lines; content not shown in this rendering]
+        logger.info(f"Neighborhood scoring metric: '{score_metric}'")
+        logger.info(f"Null distribution: '{null_distribution}'")
+        logger.info(f"Number of permutations: {num_permutations}")
+        logger.info(f"Maximum workers: {max_workers}")
        # Run permutation test to compute neighborhood significance
        neighborhood_significance = compute_permutation_test(
            neighborhoods=neighborhoods,

@@ -260,7 +276,7 @@ class RISK(NetworkIO, AnnotationsIO):
            NetworkGraph: A fully initialized and processed NetworkGraph object.
        """
        # Log the parameters and display headers
-        [1 removed line; content not shown in this rendering]
+        log_header("Finding significant neighborhoods")
        params.log_graph(
            tail=tail,
            pval_cutoff=pval_cutoff,

@@ -274,9 +290,9 @@ class RISK(NetworkIO, AnnotationsIO):
            max_cluster_size=max_cluster_size,
        )
 
-        [3 removed lines; content not shown in this rendering]
+        logger.info(f"p-value cutoff: {pval_cutoff}")
+        logger.info(f"FDR BH cutoff: {fdr_cutoff}")
+        logger.info(
            f"Significance tail: '{tail}' ({'enrichment' if tail == 'right' else 'depletion' if tail == 'left' else 'both'})"
        )
        # Calculate significant neighborhoods based on the provided parameters

@@ -288,7 +304,7 @@ class RISK(NetworkIO, AnnotationsIO):
            fdr_cutoff=fdr_cutoff,
        )
 
-        [1 removed line; content not shown in this rendering]
+        log_header("Processing neighborhoods")
        # Process neighborhoods by imputing and pruning based on the given settings
        processed_neighborhoods = process_neighborhoods(
            network=network,

@@ -297,9 +313,9 @@ class RISK(NetworkIO, AnnotationsIO):
            prune_threshold=prune_threshold,
        )
 
-        [3 removed lines; content not shown in this rendering]
+        log_header("Finding top annotations")
+        logger.info(f"Min cluster size: {min_cluster_size}")
+        logger.info(f"Max cluster size: {max_cluster_size}")
        # Define top annotations based on processed neighborhoods
        top_annotations = self._define_top_annotations(
            network=network,

@@ -309,7 +325,7 @@ class RISK(NetworkIO, AnnotationsIO):
            max_cluster_size=max_cluster_size,
        )
 
-        [1 removed line; content not shown in this rendering]
+        log_header("Optimizing distance threshold for domains")
        # Define domains in the network using the specified clustering settings
        domains = self._define_domains(
            neighborhoods=processed_neighborhoods,

@@ -357,7 +373,7 @@ class RISK(NetworkIO, AnnotationsIO):
        Returns:
            NetworkPlotter: A NetworkPlotter object configured with the given parameters.
        """
-        [1 removed line; content not shown in this rendering]
+        log_header("Loading plotter")
        # Log the plotter settings
        params.log_plotter(
            figsize=figsize,

@@ -398,9 +414,9 @@ class RISK(NetworkIO, AnnotationsIO):
        else:
            for_print_distance_metric = distance_metric
        # Log and display neighborhood settings
-        [3 removed lines; content not shown in this rendering]
+        logger.info(f"Distance metric: '{for_print_distance_metric}'")
+        logger.info(f"Edge length threshold: {edge_length_threshold}")
+        logger.info(f"Random seed: {random_seed}")
 
        # Compute neighborhoods based on the network and distance metric
        neighborhoods = get_network_neighborhoods(
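`RISK.__init__` now accepts a `verbose` keyword that is routed to `set_global_verbosity` before the parameters are initialized and `NetworkIO.__init__` is invoked. A minimal sketch, assuming `RISK` is exposed at the package top level:

```python
# Minimal sketch; constructor arguments other than `verbose` are forwarded to
# NetworkIO.__init__, so the accepted keywords depend on that class.
from risk import RISK

risk_quiet = RISK(verbose=False)  # only ERROR-level messages reach the console
risk_loud = RISK()                # verbose defaults to True, so INFO is shown

params = risk_loud.params         # the params property returns the shared Params instance
```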
risk/log/console.py (deleted)

@@ -1,16 +0,0 @@
-"""
-risk/log/console
-~~~~~~~~~~~~~~~~
-"""
-
-
-def print_header(input_string: str) -> None:
-    """Print the input string as a header with a line of dashes above and below it.
-
-    Args:
-        input_string (str): The string to be printed as a header.
-    """
-    border = "-" * len(input_string)
-    print(border)
-    print(input_string)
-    print(border)
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|