risk-network 0.0.9b6__py3-none-any.whl → 0.0.9b8__py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- risk/__init__.py +1 -1
- risk/annotations/io.py +12 -4
- risk/neighborhoods/__init__.py +1 -1
- risk/neighborhoods/domains.py +4 -8
- risk/network/graph/network.py +28 -6
- risk/network/plot/canvas.py +1 -1
- risk/network/plot/contour.py +1 -1
- risk/network/plot/labels.py +1 -1
- risk/network/plot/network.py +1 -1
- risk/network/plot/plotter.py +1 -1
- risk/network/plot/utils/{color.py → colors.py} +39 -26
- risk/risk.py +8 -36
- {risk_network-0.0.9b6.dist-info → risk_network-0.0.9b8.dist-info}/METADATA +1 -1
- {risk_network-0.0.9b6.dist-info → risk_network-0.0.9b8.dist-info}/RECORD +17 -17
- {risk_network-0.0.9b6.dist-info → risk_network-0.0.9b8.dist-info}/LICENSE +0 -0
- {risk_network-0.0.9b6.dist-info → risk_network-0.0.9b8.dist-info}/WHEEL +0 -0
- {risk_network-0.0.9b6.dist-info → risk_network-0.0.9b8.dist-info}/top_level.txt +0 -0
risk/__init__.py
CHANGED
risk/annotations/io.py
CHANGED
@@ -41,7 +41,9 @@ class AnnotationsIO:
         """
         filetype = "JSON"
         # Log the loading of the JSON file
-        params.log_annotations(
+        params.log_annotations(
+            filetype=filetype, filepath=filepath, min_nodes_per_term=min_nodes_per_term
+        )
         _log_loading(filetype, filepath=filepath)
 
         # Load the JSON file into a dictionary
@@ -78,7 +80,9 @@ class AnnotationsIO:
         """
         filetype = "Excel"
         # Log the loading of the Excel file
-        params.log_annotations(
+        params.log_annotations(
+            filetype=filetype, filepath=filepath, min_nodes_per_term=min_nodes_per_term
+        )
         _log_loading(filetype, filepath=filepath)
 
         # Load the specified sheet from the Excel file
@@ -118,7 +122,9 @@ class AnnotationsIO:
         """
         filetype = "CSV"
         # Log the loading of the CSV file
-        params.log_annotations(
+        params.log_annotations(
+            filetype=filetype, filepath=filepath, min_nodes_per_term=min_nodes_per_term
+        )
         _log_loading(filetype, filepath=filepath)
 
         # Load the CSV file into a dictionary
@@ -154,7 +160,9 @@ class AnnotationsIO:
         """
         filetype = "TSV"
         # Log the loading of the TSV file
-        params.log_annotations(
+        params.log_annotations(
+            filetype=filetype, filepath=filepath, min_nodes_per_term=min_nodes_per_term
+        )
         _log_loading(filetype, filepath=filepath)
 
         # Load the TSV file into a dictionary
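
Taken together, these hunks add the min_nodes_per_term argument to the parameter log for every loader in AnnotationsIO. A minimal sketch of the resulting call pattern; the file path and threshold values are placeholders, not values from the package:

# Sketch of the logging pattern now shared by the JSON/Excel/CSV/TSV loaders.
from risk.log import params

params.log_annotations(
    filetype="CSV",
    filepath="annotations.csv",   # hypothetical input file
    min_nodes_per_term=2,         # hypothetical threshold, now recorded per load
)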
risk/neighborhoods/__init__.py
CHANGED
risk/neighborhoods/domains.py
CHANGED
@@ -86,13 +86,13 @@ def define_domains(
     return node_to_domain
 
 
-def trim_domains_and_top_annotations(
+def trim_domains(
     domains: pd.DataFrame,
     top_annotations: pd.DataFrame,
     min_cluster_size: int = 5,
     max_cluster_size: int = 1000,
 ) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
-    """Trim domains
+    """Trim domains that do not meet size criteria and find outliers.
 
     Args:
         domains (pd.DataFrame): DataFrame of domain data for the network nodes.
@@ -101,8 +101,7 @@ def trim_domains_and_top_annotations(
         max_cluster_size (int, optional): Maximum size of a cluster to be retained. Defaults to 1000.
 
     Returns:
-        Tuple[pd.DataFrame, pd.DataFrame
-        - Trimmed annotations (pd.DataFrame)
+        Tuple[pd.DataFrame, pd.DataFrame]:
         - Trimmed domains (pd.DataFrame)
         - A DataFrame with domain labels (pd.DataFrame)
     """
@@ -155,14 +154,11 @@ def trim_domains_and_top_annotations(
     ).set_index("id")
 
     # Remove invalid domains
-    valid_annotations = top_annotations[~top_annotations["domain"].isin(invalid_domain_ids)].drop(
-        columns=["normalized_value"]
-    )
     valid_domains = domains[~domains["primary_domain"].isin(invalid_domain_ids)]
     valid_trimmed_domains_matrix = trimmed_domains_matrix[
         ~trimmed_domains_matrix.index.isin(invalid_domain_ids)
     ]
-    return
+    return valid_domains, valid_trimmed_domains_matrix
 
 
 def _optimize_silhouette_across_linkage_and_metrics(
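
The renamed trim_domains no longer returns trimmed annotations, only the trimmed domains and their label matrix. A minimal migration sketch for callers, assuming `domains` and `top_annotations` are DataFrames produced earlier in the pipeline (they are not constructed here):

from risk.neighborhoods import trim_domains  # re-exported name in 0.0.9b8

# 0.0.9b6 returned three values (trimmed annotations first, per the old docstring);
# 0.0.9b8 returns only the trimmed domains and the domain-label matrix.
domains, trimmed_domains = trim_domains(
    domains=domains,
    top_annotations=top_annotations,
    min_cluster_size=5,       # defaults shown in the signature above
    max_cluster_size=1000,
)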
risk/network/graph/network.py
CHANGED
@@ -27,7 +27,6 @@ class NetworkGraph:
         network: nx.Graph,
         annotations: Dict[str, Any],
         neighborhoods: Dict[str, Any],
-        top_annotations: pd.DataFrame,
         domains: pd.DataFrame,
         trimmed_domains: pd.DataFrame,
         node_label_to_node_id_map: Dict[str, Any],
@@ -39,24 +38,20 @@ class NetworkGraph:
             network (nx.Graph): The network graph.
             annotations (Dict[str, Any]): The annotations associated with the network.
             neighborhoods (Dict[str, Any]): Neighborhood significance data.
-            top_annotations (pd.DataFrame): DataFrame containing annotations data for the network nodes.
             domains (pd.DataFrame): DataFrame containing domain data for the network nodes.
             trimmed_domains (pd.DataFrame): DataFrame containing trimmed domain data for the network nodes.
             node_label_to_node_id_map (Dict[str, Any]): A dictionary mapping node labels to their corresponding IDs.
             node_significance_sums (np.ndarray): Array containing the significant sums for the nodes.
         """
         # Initialize self.network downstream of the other attributes
-
+        # All public attributes can be accessed after initialization
         self.domain_id_to_node_ids_map = self._create_domain_id_to_node_ids_map(domains)
-        self.domains = domains
         self.domain_id_to_domain_terms_map = self._create_domain_id_to_domain_terms_map(
             trimmed_domains
         )
         self.domain_id_to_domain_info_map = self._create_domain_id_to_domain_info_map(
             trimmed_domains
         )
-        self.trimmed_domains = trimmed_domains
-        self.node_significance_sums = node_significance_sums
         self.node_id_to_domain_ids_and_significance_map = (
             self._create_node_id_to_domain_ids_and_significances(domains)
         )
@@ -64,6 +59,7 @@ class NetworkGraph:
         self.node_label_to_significance_map = dict(
             zip(node_label_to_node_id_map.keys(), node_significance_sums)
         )
+        self.node_significance_sums = node_significance_sums
         self.node_label_to_node_id_map = node_label_to_node_id_map
 
         # NOTE: Below this point, instance attributes (i.e., self) will be used!
@@ -75,6 +71,32 @@ class NetworkGraph:
         # NOTE: Only after the above attributes are initialized, we can create the summary
         self.summary = AnalysisSummary(annotations, neighborhoods, self)
 
+    def pop(self, domain_id: str) -> None:
+        """Remove domain ID from instance domain ID mappings. This can be useful for cleaning up
+        domain-specific mappings based on a given criterion, as domain attributes are stored and
+        accessed only in dictionaries modified by this method.
+
+        Args:
+            key (str): The domain ID key to be removed from each mapping.
+        """
+        # Define the domain mappings to be updated
+        domain_mappings = [
+            self.domain_id_to_node_ids_map,
+            self.domain_id_to_domain_terms_map,
+            self.domain_id_to_domain_info_map,
+            self.domain_id_to_node_labels_map,
+        ]
+        # Remove the specified domain_id key from each mapping if it exists
+        for mapping in domain_mappings:
+            if domain_id in mapping:
+                mapping.pop(domain_id)
+
+        # Remove the domain_id from the node_id_to_domain_ids_and_significance_map
+        for node_id, domain_info in self.node_id_to_domain_ids_and_significance_map.items():
+            if domain_id in domain_info["domains"]:
+                domain_info["domains"].remove(domain_id)
+                domain_info["significances"].pop(domain_id)
+
     @staticmethod
     def _create_domain_id_to_node_ids_map(domains: pd.DataFrame) -> Dict[int, Any]:
         """Create a mapping from domains to the list of node IDs belonging to each domain.
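
The new public pop method removes a domain ID from every per-domain mapping held by the graph object. A minimal usage sketch, assuming `graph` is an already constructed NetworkGraph from the RISK workflow (it is not built here); the size cutoff is illustrative:

# Prune small domains from an existing NetworkGraph instance.
small_domain_ids = [
    domain_id
    for domain_id, node_ids in graph.domain_id_to_node_ids_map.items()
    if len(node_ids) < 3  # hypothetical cutoff
]
for domain_id in small_domain_ids:
    graph.pop(domain_id)  # drops the domain from all domain ID mappings shown above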
risk/network/plot/canvas.py
CHANGED
@@ -10,7 +10,7 @@ import numpy as np
 
 from risk.log import params
 from risk.network.graph import NetworkGraph
-from risk.network.plot.utils.color import to_rgba
+from risk.network.plot.utils.colors import to_rgba
 from risk.network.plot.utils.layout import calculate_bounding_box
 
 
risk/network/plot/contour.py
CHANGED
@@ -13,7 +13,7 @@ from scipy.stats import gaussian_kde
 
 from risk.log import params, logger
 from risk.network.graph import NetworkGraph
-from risk.network.plot.utils.color import get_annotated_domain_colors, to_rgba
+from risk.network.plot.utils.colors import get_annotated_domain_colors, to_rgba
 
 
 class Contour:
risk/network/plot/labels.py
CHANGED
@@ -12,7 +12,7 @@ import pandas as pd
 
 from risk.log import params
 from risk.network.graph import NetworkGraph
-from risk.network.plot.utils.color import get_annotated_domain_colors, to_rgba
+from risk.network.plot.utils.colors import get_annotated_domain_colors, to_rgba
 from risk.network.plot.utils.layout import calculate_bounding_box
 
 TERM_DELIMITER = "::::"  # String used to separate multiple domain terms when constructing composite domain labels
risk/network/plot/network.py
CHANGED
@@ -10,7 +10,7 @@ import numpy as np
 
 from risk.log import params
 from risk.network.graph import NetworkGraph
-from risk.network.plot.utils.color import get_domain_colors, to_rgba
+from risk.network.plot.utils.colors import get_domain_colors, to_rgba
 
 
 class Network:
risk/network/plot/plotter.py
CHANGED
@@ -14,7 +14,7 @@ from risk.network.plot.canvas import Canvas
 from risk.network.plot.contour import Contour
 from risk.network.plot.labels import Labels
 from risk.network.plot.network import Network
-from risk.network.plot.utils.color import to_rgba
+from risk.network.plot.utils.colors import to_rgba
 from risk.network.plot.utils.layout import calculate_bounding_box
 
 
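
The five plot modules above change only their import of the color utilities, following the color.py → colors.py rename listed at the top of this diff. External code importing these helpers directly needs the same one-line update; a minimal sketch, assuming to_rgba keeps the alpha keyword shown in the colors.py hunks below (the color and alpha values are illustrative):

# Old (0.0.9b6):  from risk.network.plot.utils.color import to_rgba
from risk.network.plot.utils.colors import to_rgba  # new module name in 0.0.9b8

rgba = to_rgba("tomato", alpha=0.5)  # normalize a color spec to RGBA with an alpha override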
risk/network/plot/utils/{color.py → colors.py}
RENAMED
@@ -9,6 +9,7 @@ import matplotlib
 import matplotlib.colors as mcolors
 import networkx as nx
 import numpy as np
+from sklearn.cluster import AgglomerativeClustering
 
 from risk.network.graph import NetworkGraph
 from risk.network.plot.utils.layout import calculate_centroids
@@ -249,34 +250,53 @@ def _get_colors(
     """
     # Set random seed for reproducibility
     np.random.seed(random_seed)
+
     # Determine the number of colors to generate based on the number of domains
-
+    num_domains = len(domain_id_to_node_ids_map)
     if color:
         # Generate all colors as the same specified color
-        rgba = to_rgba(color, num_repeats=
+        rgba = to_rgba(color, num_repeats=num_domains)
         return rgba
 
     # Load colormap
     colormap = matplotlib.colormaps.get_cmap(cmap)
     # Step 1: Calculate centroids for each domain
     centroids = calculate_centroids(network, domain_id_to_node_ids_map)
-    # Step 2: Calculate pairwise distances between centroids
     centroid_array = np.array(centroids)
+
+    # Step 2: Cluster domains based on proximity using Agglomerative Clustering
     dist_matrix = np.linalg.norm(centroid_array[:, None] - centroid_array, axis=-1)
-
-
-
-
-
-
-
+    max_distance = np.max(dist_matrix) if np.max(dist_matrix) != 0 else 1
+    proximity_threshold = 0.3 * max_distance
+
+    clustering_model = AgglomerativeClustering(
+        n_clusters=None, distance_threshold=proximity_threshold
+    )
+    cluster_labels = clustering_model.fit_predict(centroid_array)
+    num_clusters = len(set(cluster_labels))
+
+    # Step 3: Assign base color positions for each cluster, spaced across colormap
+    cluster_positions = np.linspace(0, 1, num_clusters, endpoint=False)
+    np.random.shuffle(cluster_positions)  # Shuffle based on seed to vary color layout
+    cluster_id_to_position = {
+        cluster_id: pos for cluster_id, pos in zip(np.unique(cluster_labels), cluster_positions)
+    }
+
+    # Step 4: Assign colors to each domain based on cluster base color with a global shift
+    global_shift = np.random.uniform(-0.1, 0.1)  # Small global shift for variety
+    colors = []
+    for i in range(num_domains):
+        cluster_idx = cluster_labels[i]
+        base_position = cluster_id_to_position[cluster_idx]
+        # Add global shift and ensure it stays within [0, 1]
+        color_position = (base_position + global_shift) % 1
+        colors.append(colormap(color_position))  # Get color from colormap
 
-
-    return [colormap(pos) for pos in color_positions]
+    return colors
 
 
 def _assign_distant_colors(dist_matrix: np.ndarray, num_colors_to_generate: int) -> np.ndarray:
-    """Assign
+    """Assign color positions ensuring centroids close in space are maximally separated in color.
 
     Args:
         dist_matrix (ndarray): Matrix of pairwise centroid distances.
@@ -285,22 +305,14 @@ def _assign_distant_colors(dist_matrix: np.ndarray, num_colors_to_generate: int)
     Returns:
         np.array: Array of color positions in the range [0, 1].
     """
-
-    # Step 1: Sort indices by centroid proximity (based on sum of distances to others)
+    # Step 1: Calculate proximity order based on the sum of distances
     proximity_order = sorted(
         range(num_colors_to_generate), key=lambda idx: np.sum(dist_matrix[idx])
    )
-    # Step 2:
-
-
-
-    # Step 3: Adjust colors so that centroids close to one another are maximally distant on the color spectrum
-    half_spectrum = int(num_colors_to_generate / 2)
-    for i in range(half_spectrum):
-        # Split the spectrum so that close centroids are assigned distant colors
-        color_positions[proximity_order[i]] = (i * 2) / num_colors_to_generate
-        color_positions[proximity_order[-(i + 1)]] = ((i * 2) + 1) / num_colors_to_generate
-
+    # Step 2: Generate evenly spaced color positions
+    color_positions = np.linspace(0, 1, num_colors_to_generate, endpoint=False)
+    # Step 3: Shuffle color positions based on proximity
+    color_positions = color_positions[proximity_order]
    return color_positions
 
 
@@ -410,6 +422,7 @@ def to_rgba(
 
        if alpha is not None:  # Override alpha if provided
            rgba[3] = alpha
+
        return rgba
 
    # If color is a 2D array of RGBA values, convert it to a list of lists
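
The rewritten _get_colors groups domain centroids with agglomerative clustering at a distance threshold of 30% of the largest pairwise distance, spaces one base colormap position per cluster, and applies a small seeded global shift. A standalone sketch of that idea on synthetic centroids; the data and the colormap name are illustrative, not the package's defaults:

# Mirrors the steps added in _get_colors, but on synthetic 2-D centroids.
import matplotlib
import numpy as np
from sklearn.cluster import AgglomerativeClustering

rng = np.random.default_rng(888)
centroid_array = rng.random((12, 2))  # 12 hypothetical domain centroids

# Cluster centroids whose pairwise distance is within 30% of the maximum distance
dist_matrix = np.linalg.norm(centroid_array[:, None] - centroid_array, axis=-1)
max_distance = np.max(dist_matrix) if np.max(dist_matrix) != 0 else 1
labels = AgglomerativeClustering(
    n_clusters=None, distance_threshold=0.3 * max_distance
).fit_predict(centroid_array)

# Spread one base colormap position per cluster, then color each domain from it
colormap = matplotlib.colormaps.get_cmap("gist_rainbow")  # illustrative colormap
positions = np.linspace(0, 1, len(set(labels)), endpoint=False)
cluster_to_pos = dict(zip(np.unique(labels), positions))
shift = rng.uniform(-0.1, 0.1)  # small global shift, as in the diff
colors = [colormap((cluster_to_pos[lab] + shift) % 1) for lab in labels]
print(colors[:3])

The rewritten _assign_distant_colors follows the same spirit: evenly spaced positions are generated with np.linspace and then reordered by each centroid's summed distance to the others, replacing the old half-spectrum interleaving.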
risk/risk.py
CHANGED
@@ -16,7 +16,7 @@ from risk.neighborhoods import (
     define_domains,
     get_network_neighborhoods,
     process_neighborhoods,
-    trim_domains_and_top_annotations,
+    trim_domains,
 )
 from risk.network import NetworkIO, NetworkGraph, NetworkPlotter
 from risk.stats import (
@@ -335,16 +335,20 @@ class RISK(NetworkIO, AnnotationsIO):
         )
 
         log_header("Optimizing distance threshold for domains")
+        # Extract the significant significance matrix from the neighborhoods data
+        significant_neighborhoods_significance = processed_neighborhoods[
+            "significant_significance_matrix"
+        ]
         # Define domains in the network using the specified clustering settings
-        domains = self._define_domains(
-            neighborhoods=processed_neighborhoods,
+        domains = define_domains(
             top_annotations=top_annotations,
+            significant_neighborhoods_significance=significant_neighborhoods_significance,
             linkage_criterion=linkage_criterion,
             linkage_method=linkage_method,
             linkage_metric=linkage_metric,
         )
         # Trim domains and top annotations based on cluster size constraints
-
+        domains, trimmed_domains = trim_domains(
             domains=domains,
             top_annotations=top_annotations,
             min_cluster_size=min_cluster_size,
@@ -361,7 +365,6 @@ class RISK(NetworkIO, AnnotationsIO):
             network=network,
             annotations=annotations,
             neighborhoods=neighborhoods,
-            top_annotations=top_annotations,
             domains=domains,
             trimmed_domains=trimmed_domains,
             node_label_to_node_id_map=node_label_to_id,
@@ -484,34 +487,3 @@ class RISK(NetworkIO, AnnotationsIO):
             min_cluster_size=min_cluster_size,
             max_cluster_size=max_cluster_size,
         )
-
-    def _define_domains(
-        self,
-        neighborhoods: Dict[str, Any],
-        top_annotations: pd.DataFrame,
-        linkage_criterion: str,
-        linkage_method: str,
-        linkage_metric: str,
-    ) -> pd.DataFrame:
-        """Define domains in the network based on significance data.
-
-        Args:
-            neighborhoods (Dict[str, Any]): Enrichment data for neighborhoods.
-            top_annotations (pd.DataFrame): Enrichment matrix for top annotations.
-            linkage_criterion (str): Clustering criterion for defining domains.
-            linkage_method (str): Clustering method to use.
-            linkage_metric (str): Metric to use for calculating distances.
-
-        Returns:
-            pd.DataFrame: Matrix of defined domains.
-        """
-        # Extract the significant significance matrix from the neighborhoods data
-        significant_neighborhoods_significance = neighborhoods["significant_significance_matrix"]
-        # Call external function to define domains based on the extracted data
-        return define_domains(
-            top_annotations=top_annotations,
-            significant_neighborhoods_significance=significant_neighborhoods_significance,
-            linkage_criterion=linkage_criterion,
-            linkage_method=linkage_method,
-            linkage_metric=linkage_metric,
-        )
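
With the private _define_domains helper removed, the calling method now extracts the significance matrix itself and calls define_domains and trim_domains directly. A minimal sketch of the equivalent standalone flow, assuming `processed_neighborhoods` and `top_annotations` come from earlier pipeline steps (not constructed here) and using illustrative linkage settings:

from risk.neighborhoods import define_domains, trim_domains

significance = processed_neighborhoods["significant_significance_matrix"]
domains = define_domains(
    top_annotations=top_annotations,
    significant_neighborhoods_significance=significance,
    linkage_criterion="distance",  # illustrative clustering settings
    linkage_method="average",
    linkage_metric="yule",
)
domains, trimmed_domains = trim_domains(
    domains=domains,
    top_annotations=top_annotations,
    min_cluster_size=5,
    max_cluster_size=1000,
)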
{risk_network-0.0.9b6.dist-info → risk_network-0.0.9b8.dist-info}/RECORD
RENAMED
@@ -1,29 +1,29 @@
-risk/__init__.py,sha256=
+risk/__init__.py,sha256=CF_5U_LFsoHtqjLJO9F0HRUYI0sDt4UlIWwkpkOG-no,112
 risk/constants.py,sha256=XInRaH78Slnw_sWgAsBFbUHkyA0h0jL0DKGuQNbOvjM,550
-risk/risk.py,sha256=
+risk/risk.py,sha256=kntBxYwAEpoAjXN_l6BM3yxFKyuAKmd8OMGl2P00pZ4,22416
 risk/annotations/__init__.py,sha256=kXgadEXaCh0z8OyhOhTj7c3qXGmWgOhaSZ4gSzSb59U,147
 risk/annotations/annotations.py,sha256=WVT9wzTm8lTpMw_3SnbyljWR77yExo0rb1zVgJza8nw,14284
-risk/annotations/io.py,sha256=
+risk/annotations/io.py,sha256=tk1dAsxIwW5oLxB294ppiuZd4__Y5pj8se8KhitRSNA,10554
 risk/log/__init__.py,sha256=gy7C5L6D222AYUChq5lkc0LsCJ_QMQPaFiBJKbecdac,201
 risk/log/console.py,sha256=C52s3FgQ2e9kQWcXL8m7rs_pnKXt5Yy8PBHmQkOTiNo,4537
 risk/log/parameters.py,sha256=o4StqYCa0kt7_Ht4mKa1DwwvhGUwkC_dGBaiUIc0GB0,5683
-risk/neighborhoods/__init__.py,sha256=
+risk/neighborhoods/__init__.py,sha256=C-SD0G-9skSLjLFdAB6v6lAjO8la2v6Fqy63h2MY28k,186
 risk/neighborhoods/community.py,sha256=MAgIblbuisEPwVU6mFZd4Yd9NUKlaHK99suw51r1Is0,7065
-risk/neighborhoods/domains.py,sha256=
+risk/neighborhoods/domains.py,sha256=t91xSpx9Ty9hSlhRq2_XwyPpBP7sjKhovcPPvkwWtf0,11398
 risk/neighborhoods/neighborhoods.py,sha256=0TAP-xi4hgtnrK0cKQPHQHq9IVGHOMF1wYEcx6tsxRA,22241
 risk/network/__init__.py,sha256=iEPeJdZfqp0toxtbElryB8jbz9_t_k4QQ3iDvKE8C_0,126
 risk/network/geometry.py,sha256=gFtYUj9j9aul4paKq_qSGJn39Nazxu_MXv8m-tYYtrk,6840
 risk/network/io.py,sha256=AWSbZGLZHtl72KSlafQlcYoG00YLSznG7UYDi_wDT7M,22958
 risk/network/graph/__init__.py,sha256=H0YEiwqZ02LBTkH4blPwUjQ-DOUnhaTTNHM0BcXii6U,81
-risk/network/graph/network.py,sha256=
+risk/network/graph/network.py,sha256=JzYbrgJLiNWFyPIR6_qNSjMtmXmfzRv2FwWSdyg8HjY,12205
 risk/network/graph/summary.py,sha256=h2bpUjfwI1NMflkKwplGQEGPswfAtunormdTIEQYbvs,8987
 risk/network/plot/__init__.py,sha256=MfmaXJgAZJgXZ2wrhK8pXwzETlcMaLChhWXKAozniAo,98
-risk/network/plot/canvas.py,sha256=
-risk/network/plot/contour.py,sha256=
-risk/network/plot/labels.py,sha256=
-risk/network/plot/network.py,sha256=
-risk/network/plot/plotter.py,sha256=
-risk/network/plot/utils/
+risk/network/plot/canvas.py,sha256=W8dFv4XYTzCWXBchgsc0esOQRn4usM4LkwNGPSDMobE,13357
+risk/network/plot/contour.py,sha256=VONX9l6owrZvWtR0mWQ6z2GSd1YXIv5wV_sf5ROQLT4,15581
+risk/network/plot/labels.py,sha256=eorP80CmAbHmt7de2qHna1tHGKL8YiHknwFW2R3tvjI,45734
+risk/network/plot/network.py,sha256=_K8Am2y6zSGrm3fAgMbXxzgspbugJi3uK4_tG8qqGoI,14015
+risk/network/plot/plotter.py,sha256=eS1vHqvOA2O001Rq7WiDcgqcehJ3fg4OPfvkezH4erw,5771
+risk/network/plot/utils/colors.py,sha256=lNDwlTbpXCKAbaqSHsRlxxY_MRAbogma81xyqHawl3U,21005
 risk/network/plot/utils/layout.py,sha256=6o7idoWQnyzujSWOFXQykUvyPy8NuRtJV04TnlbXXBo,3647
 risk/stats/__init__.py,sha256=WcgoETQ-hS0LQqKRsAMIPtP15xZ-4eul6VUBuUx4Wzc,220
 risk/stats/hypergeom.py,sha256=oc39f02ViB1vQ-uaDrxG_tzAT6dxQBRjc88EK2EGn78,2282
@@ -32,8 +32,8 @@ risk/stats/stats.py,sha256=z8NrhiVj4BzJ250bVLfytpmfC7RzYu7mBuIZD_l0aCA,7222
 risk/stats/permutation/__init__.py,sha256=neJp7FENC-zg_CGOXqv-iIvz1r5XUKI9Ruxhmq7kDOI,105
 risk/stats/permutation/permutation.py,sha256=meBNSrbRa9P8WJ54n485l0H7VQJlMSfHqdN4aCKYCtQ,10105
 risk/stats/permutation/test_functions.py,sha256=lftOude6hee0pyR80HlBD32522JkDoN5hrKQ9VEbuoY,2345
-risk_network-0.0.
-risk_network-0.0.
-risk_network-0.0.
-risk_network-0.0.
-risk_network-0.0.
+risk_network-0.0.9b8.dist-info/LICENSE,sha256=jOtLnuWt7d5Hsx6XXB2QxzrSe2sWWh3NgMfFRetluQM,35147
+risk_network-0.0.9b8.dist-info/METADATA,sha256=JaTFHNn11mwliUNgsG6jvz2sU53goxjS3cwQXLEbcEw,47497
+risk_network-0.0.9b8.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
+risk_network-0.0.9b8.dist-info/top_level.txt,sha256=NX7C2PFKTvC1JhVKv14DFlFAIFnKc6Lpsu1ZfxvQwVw,5
+risk_network-0.0.9b8.dist-info/RECORD,,
{risk_network-0.0.9b6.dist-info → risk_network-0.0.9b8.dist-info}/LICENSE
File without changes
{risk_network-0.0.9b6.dist-info → risk_network-0.0.9b8.dist-info}/WHEEL
File without changes
{risk_network-0.0.9b6.dist-info → risk_network-0.0.9b8.dist-info}/top_level.txt
File without changes