risk-network 0.0.13b4__tar.gz → 0.0.14__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. risk_network-0.0.14/PKG-INFO +115 -0
  2. risk_network-0.0.14/README.md +74 -0
  3. {risk_network-0.0.13b4 → risk_network-0.0.14}/src/risk/__init__.py +3 -2
  4. risk_network-0.0.14/src/risk/_annotation/__init__.py +10 -0
  5. risk_network-0.0.13b4/src/risk/annotation/annotation.py → risk_network-0.0.14/src/risk/_annotation/_annotation.py +18 -11
  6. risk_network-0.0.13b4/src/risk/annotation/io.py → risk_network-0.0.14/src/risk/_annotation/_io.py +22 -14
  7. risk_network-0.0.13b4/src/risk/annotation/nltk_setup.py → risk_network-0.0.14/src/risk/_annotation/_nltk_setup.py +7 -5
  8. risk_network-0.0.14/src/risk/_log/__init__.py +11 -0
  9. risk_network-0.0.13b4/src/risk/log/console.py → risk_network-0.0.14/src/risk/_log/_console.py +22 -12
  10. risk_network-0.0.13b4/src/risk/log/parameters.py → risk_network-0.0.14/src/risk/_log/_parameters.py +25 -14
  11. risk_network-0.0.14/src/risk/_neighborhoods/__init__.py +8 -0
  12. risk_network-0.0.13b4/src/risk/neighborhoods/api.py → risk_network-0.0.14/src/risk/_neighborhoods/_api.py +23 -17
  13. risk_network-0.0.13b4/src/risk/neighborhoods/community.py → risk_network-0.0.14/src/risk/_neighborhoods/_community.py +19 -11
  14. risk_network-0.0.13b4/src/risk/neighborhoods/domains.py → risk_network-0.0.14/src/risk/_neighborhoods/_domains.py +92 -35
  15. risk_network-0.0.13b4/src/risk/neighborhoods/neighborhoods.py → risk_network-0.0.14/src/risk/_neighborhoods/_neighborhoods.py +69 -58
  16. risk_network-0.0.14/src/risk/_neighborhoods/_stats/__init__.py +13 -0
  17. risk_network-0.0.14/src/risk/_neighborhoods/_stats/_permutation/__init__.py +6 -0
  18. risk_network-0.0.13b4/src/risk/neighborhoods/stats/permutation/permutation.py → risk_network-0.0.14/src/risk/_neighborhoods/_stats/_permutation/_permutation.py +9 -6
  19. risk_network-0.0.13b4/src/risk/neighborhoods/stats/permutation/test_functions.py → risk_network-0.0.14/src/risk/_neighborhoods/_stats/_permutation/_test_functions.py +6 -4
  20. risk_network-0.0.13b4/src/risk/neighborhoods/stats/tests.py → risk_network-0.0.14/src/risk/_neighborhoods/_stats/_tests.py +12 -7
  21. risk_network-0.0.14/src/risk/_network/__init__.py +8 -0
  22. risk_network-0.0.14/src/risk/_network/_graph/__init__.py +7 -0
  23. risk_network-0.0.13b4/src/risk/network/graph/api.py → risk_network-0.0.14/src/risk/_network/_graph/_api.py +13 -13
  24. risk_network-0.0.13b4/src/risk/network/graph/graph.py → risk_network-0.0.14/src/risk/_network/_graph/_graph.py +24 -13
  25. risk_network-0.0.13b4/src/risk/network/graph/stats.py → risk_network-0.0.14/src/risk/_network/_graph/_stats.py +8 -5
  26. risk_network-0.0.13b4/src/risk/network/graph/summary.py → risk_network-0.0.14/src/risk/_network/_graph/_summary.py +39 -32
  27. risk_network-0.0.13b4/src/risk/network/io.py → risk_network-0.0.14/src/risk/_network/_io.py +166 -148
  28. risk_network-0.0.14/src/risk/_network/_plotter/__init__.py +6 -0
  29. risk_network-0.0.13b4/src/risk/network/plotter/api.py → risk_network-0.0.14/src/risk/_network/_plotter/_api.py +9 -10
  30. risk_network-0.0.13b4/src/risk/network/plotter/canvas.py → risk_network-0.0.14/src/risk/_network/_plotter/_canvas.py +14 -10
  31. risk_network-0.0.13b4/src/risk/network/plotter/contour.py → risk_network-0.0.14/src/risk/_network/_plotter/_contour.py +17 -11
  32. risk_network-0.0.13b4/src/risk/network/plotter/labels.py → risk_network-0.0.14/src/risk/_network/_plotter/_labels.py +38 -23
  33. risk_network-0.0.13b4/src/risk/network/plotter/network.py → risk_network-0.0.14/src/risk/_network/_plotter/_network.py +17 -11
  34. risk_network-0.0.13b4/src/risk/network/plotter/plotter.py → risk_network-0.0.14/src/risk/_network/_plotter/_plotter.py +19 -15
  35. risk_network-0.0.14/src/risk/_network/_plotter/_utils/__init__.py +7 -0
  36. risk_network-0.0.13b4/src/risk/network/plotter/utils/colors.py → risk_network-0.0.14/src/risk/_network/_plotter/_utils/_colors.py +19 -11
  37. risk_network-0.0.13b4/src/risk/network/plotter/utils/layout.py → risk_network-0.0.14/src/risk/_network/_plotter/_utils/_layout.py +8 -5
  38. risk_network-0.0.13b4/src/risk/risk.py → risk_network-0.0.14/src/risk/_risk.py +11 -11
  39. risk_network-0.0.14/src/risk_network.egg-info/PKG-INFO +115 -0
  40. risk_network-0.0.14/src/risk_network.egg-info/SOURCES.txt +51 -0
  41. {risk_network-0.0.13b4 → risk_network-0.0.14}/tests/test_load_annotation.py +26 -13
  42. {risk_network-0.0.13b4 → risk_network-0.0.14}/tests/test_load_graph.py +109 -11
  43. {risk_network-0.0.13b4 → risk_network-0.0.14}/tests/test_load_io_combinations.py +2 -1
  44. {risk_network-0.0.13b4 → risk_network-0.0.14}/tests/test_load_neighborhoods.py +24 -12
  45. {risk_network-0.0.13b4 → risk_network-0.0.14}/tests/test_load_network.py +32 -16
  46. {risk_network-0.0.13b4 → risk_network-0.0.14}/tests/test_load_plotter.py +62 -31
  47. {risk_network-0.0.13b4 → risk_network-0.0.14}/tests/test_log.py +9 -10
  48. risk_network-0.0.13b4/PKG-INFO +0 -125
  49. risk_network-0.0.13b4/README.md +0 -84
  50. risk_network-0.0.13b4/src/risk/annotation/__init__.py +0 -10
  51. risk_network-0.0.13b4/src/risk/log/__init__.py +0 -11
  52. risk_network-0.0.13b4/src/risk/neighborhoods/__init__.py +0 -7
  53. risk_network-0.0.13b4/src/risk/neighborhoods/stats/__init__.py +0 -13
  54. risk_network-0.0.13b4/src/risk/neighborhoods/stats/permutation/__init__.py +0 -6
  55. risk_network-0.0.13b4/src/risk/network/__init__.py +0 -4
  56. risk_network-0.0.13b4/src/risk/network/graph/__init__.py +0 -4
  57. risk_network-0.0.13b4/src/risk/network/plotter/__init__.py +0 -4
  58. risk_network-0.0.13b4/src/risk_network.egg-info/PKG-INFO +0 -125
  59. risk_network-0.0.13b4/src/risk_network.egg-info/SOURCES.txt +0 -50
  60. {risk_network-0.0.13b4 → risk_network-0.0.14}/LICENSE +0 -0
  61. {risk_network-0.0.13b4 → risk_network-0.0.14}/pyproject.toml +0 -0
  62. {risk_network-0.0.13b4 → risk_network-0.0.14}/setup.cfg +0 -0
  63. {risk_network-0.0.13b4 → risk_network-0.0.14}/src/risk_network.egg-info/dependency_links.txt +0 -0
  64. {risk_network-0.0.13b4 → risk_network-0.0.14}/src/risk_network.egg-info/requires.txt +0 -0
  65. {risk_network-0.0.13b4 → risk_network-0.0.14}/src/risk_network.egg-info/top_level.txt +0 -0

risk_network-0.0.14/PKG-INFO
@@ -0,0 +1,115 @@
+ Metadata-Version: 2.4
+ Name: risk-network
+ Version: 0.0.14
+ Summary: A Python package for scalable network analysis and high-quality visualization.
+ Author-email: Ira Horecka <ira89@icloud.com>
+ License: GPL-3.0-or-later
+ Project-URL: Homepage, https://github.com/riskportal/network
+ Project-URL: Issues, https://github.com/riskportal/network/issues
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Intended Audience :: Developers
+ Classifier: Intended Audience :: Science/Research
+ Classifier: License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3 :: Only
+ Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
+ Classifier: Topic :: Scientific/Engineering :: Information Analysis
+ Classifier: Topic :: Scientific/Engineering :: Visualization
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: ipywidgets
+ Requires-Dist: leidenalg
+ Requires-Dist: markov_clustering
+ Requires-Dist: matplotlib
+ Requires-Dist: networkx
+ Requires-Dist: nltk
+ Requires-Dist: numpy
+ Requires-Dist: openpyxl
+ Requires-Dist: pandas
+ Requires-Dist: python-igraph
+ Requires-Dist: python-louvain
+ Requires-Dist: scikit-learn
+ Requires-Dist: scipy
+ Requires-Dist: statsmodels
+ Requires-Dist: threadpoolctl
+ Requires-Dist: tqdm
+ Dynamic: license-file
+
+ # RISK Network
+
+ <p align="center">
+ <img src="https://i.imgur.com/8TleEJs.png" width="50%" />
+ </p>
+
+ <br>
+
+ ![Python](https://img.shields.io/badge/python-3.8%2B-yellow)
+ [![pypiv](https://img.shields.io/pypi/v/risk-network.svg)](https://pypi.python.org/pypi/risk-network)
+ ![License](https://img.shields.io/badge/license-GPLv3-purple)
+ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.xxxxxxx.svg)](https://doi.org/10.5281/zenodo.xxxxxxx)
+ ![Downloads](https://img.shields.io/pypi/dm/risk-network)
+ ![Tests](https://github.com/riskportal/network/actions/workflows/ci.yml/badge.svg)
+
+ **RISK** (Regional Inference of Significant Kinships) is a next-generation tool for biological network annotation and visualization. It integrates community detection algorithms, rigorous overrepresentation analysis, and a modular framework for diverse network types. RISK identifies biologically coherent relationships within networks and generates publication-ready visualizations, making it a useful tool for biological and interdisciplinary network analysis.
+
+ For a full description of RISK and its applications, see:
+ <br>
+ **Horecka and Röst (2025)**, _"RISK: a next-generation tool for biological network annotation and visualization"_.
+ <br>
+ DOI: [10.5281/zenodo.xxxxxxx](https://doi.org/10.5281/zenodo.xxxxxxx)
+
+ ## Documentation and Tutorial
+
+ Full documentation is available at:
+
+ - **Docs:** [https://riskportal.github.io/network-tutorial](https://riskportal.github.io/network-tutorial)
+ - **Tutorial Jupyter Notebook Repository:** [https://github.com/riskportal/network-tutorial](https://github.com/riskportal/network-tutorial)
+
+ ## Installation
+
+ RISK is compatible with Python 3.8 or later and runs on all major operating systems. To install the latest version of RISK, run:
+
+ ```bash
+ pip install risk-network --upgrade
+ ```
+
+ ## Key Features of RISK
+
+ - **Broad Data Compatibility**: Accepts multiple network formats (NetworkX, Cytoscape, GPickle) and user-provided annotations formatted as term–to–gene membership tables (JSON, CSV, TSV, Excel, or Python dictionaries).
+ - **Flexible Clustering**: Offers Louvain, Leiden, Markov Clustering, Greedy Modularity, Label Propagation, Spinglass, and Walktrap, with user-defined resolution parameters to detect both coarse and fine-grained modules.
+ - **Statistical Testing**: Provides hypergeometric, binomial, chi-squared, Poisson, z-score, and permutation tests, balancing speed with statistical rigor.
+ - **High-Resolution Visualization**: Generates publication-ready figures with contour overlays, customizable node/edge properties, and export to SVG, PNG, or PDF.
+
+ ## Example Usage
+
+ We applied RISK to a _Saccharomyces cerevisiae_ protein–protein interaction (PPI) network (Michaelis _et al_., 2023; 3,839 proteins, 30,955 interactions). RISK identified compact, functional modules overrepresented in Gene Ontology Biological Process (GO BP) terms (Ashburner _et al_., 2000), revealing biological organization including ribosomal assembly, mitochondrial organization, and RNA polymerase activity (P < 0.0001).
+
+ [![Yeast PPI network annotated with GO BP terms](https://i.imgur.com/jQKatLY.jpeg)](https://i.imgur.com/jQKatLY.jpeg)
+ **RISK workflow overview and analysis of the yeast PPI network**. GO BP terms are color-coded to represent key cellular processes—including ribosomal assembly, mitochondrial organization, and RNA polymerase activity (P < 0.0001).
+
+ ## Citation
+
+ If you use RISK in your research, please cite the following:
+
+ **Horecka and Röst (2025)**, _"RISK: a next-generation tool for biological network annotation and visualization"_.
+ <br>
+ DOI: [10.5281/zenodo.xxxxxxx](https://doi.org/10.5281/zenodo.xxxxxxx)
+
+ ## Contributing
+
+ We welcome contributions from the community:
+
+ - [Issues Tracker](https://github.com/riskportal/network/issues)
+ - [Source Code](https://github.com/riskportal/network/tree/main/risk)
+
+ ## Support
+
+ If you encounter issues or have suggestions for new features, please use the [Issues Tracker](https://github.com/riskportal/network/issues) on GitHub.
+
+ ## License
+
+ RISK is open source under the [GNU General Public License v3.0](https://www.gnu.org/licenses/gpl-3.0.en.html).

risk_network-0.0.14/README.md
@@ -0,0 +1,74 @@
+ # RISK Network
+
+ <p align="center">
+ <img src="https://i.imgur.com/8TleEJs.png" width="50%" />
+ </p>
+
+ <br>
+
+ ![Python](https://img.shields.io/badge/python-3.8%2B-yellow)
+ [![pypiv](https://img.shields.io/pypi/v/risk-network.svg)](https://pypi.python.org/pypi/risk-network)
+ ![License](https://img.shields.io/badge/license-GPLv3-purple)
+ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.xxxxxxx.svg)](https://doi.org/10.5281/zenodo.xxxxxxx)
+ ![Downloads](https://img.shields.io/pypi/dm/risk-network)
+ ![Tests](https://github.com/riskportal/network/actions/workflows/ci.yml/badge.svg)
+
+ **RISK** (Regional Inference of Significant Kinships) is a next-generation tool for biological network annotation and visualization. It integrates community detection algorithms, rigorous overrepresentation analysis, and a modular framework for diverse network types. RISK identifies biologically coherent relationships within networks and generates publication-ready visualizations, making it a useful tool for biological and interdisciplinary network analysis.
+
+ For a full description of RISK and its applications, see:
+ <br>
+ **Horecka and Röst (2025)**, _"RISK: a next-generation tool for biological network annotation and visualization"_.
+ <br>
+ DOI: [10.5281/zenodo.xxxxxxx](https://doi.org/10.5281/zenodo.xxxxxxx)
+
+ ## Documentation and Tutorial
+
+ Full documentation is available at:
+
+ - **Docs:** [https://riskportal.github.io/network-tutorial](https://riskportal.github.io/network-tutorial)
+ - **Tutorial Jupyter Notebook Repository:** [https://github.com/riskportal/network-tutorial](https://github.com/riskportal/network-tutorial)
+
+ ## Installation
+
+ RISK is compatible with Python 3.8 or later and runs on all major operating systems. To install the latest version of RISK, run:
+
+ ```bash
+ pip install risk-network --upgrade
+ ```
+
+ ## Key Features of RISK
+
+ - **Broad Data Compatibility**: Accepts multiple network formats (NetworkX, Cytoscape, GPickle) and user-provided annotations formatted as term–to–gene membership tables (JSON, CSV, TSV, Excel, or Python dictionaries).
+ - **Flexible Clustering**: Offers Louvain, Leiden, Markov Clustering, Greedy Modularity, Label Propagation, Spinglass, and Walktrap, with user-defined resolution parameters to detect both coarse and fine-grained modules.
+ - **Statistical Testing**: Provides hypergeometric, binomial, chi-squared, Poisson, z-score, and permutation tests, balancing speed with statistical rigor.
+ - **High-Resolution Visualization**: Generates publication-ready figures with contour overlays, customizable node/edge properties, and export to SVG, PNG, or PDF.
+
+ ## Example Usage
+
+ We applied RISK to a _Saccharomyces cerevisiae_ protein–protein interaction (PPI) network (Michaelis _et al_., 2023; 3,839 proteins, 30,955 interactions). RISK identified compact, functional modules overrepresented in Gene Ontology Biological Process (GO BP) terms (Ashburner _et al_., 2000), revealing biological organization including ribosomal assembly, mitochondrial organization, and RNA polymerase activity (P < 0.0001).
+
+ [![Yeast PPI network annotated with GO BP terms](https://i.imgur.com/jQKatLY.jpeg)](https://i.imgur.com/jQKatLY.jpeg)
+ **RISK workflow overview and analysis of the yeast PPI network**. GO BP terms are color-coded to represent key cellular processes—including ribosomal assembly, mitochondrial organization, and RNA polymerase activity (P < 0.0001).
+
+ ## Citation
+
+ If you use RISK in your research, please cite the following:
+
+ **Horecka and Röst (2025)**, _"RISK: a next-generation tool for biological network annotation and visualization"_.
+ <br>
+ DOI: [10.5281/zenodo.xxxxxxx](https://doi.org/10.5281/zenodo.xxxxxxx)
+
+ ## Contributing
+
+ We welcome contributions from the community:
+
+ - [Issues Tracker](https://github.com/riskportal/network/issues)
+ - [Source Code](https://github.com/riskportal/network/tree/main/risk)
+
+ ## Support
+
+ If you encounter issues or have suggestions for new features, please use the [Issues Tracker](https://github.com/riskportal/network/issues) on GitHub.
+
+ ## License
+
+ RISK is open source under the [GNU General Public License v3.0](https://www.gnu.org/licenses/gpl-3.0.en.html).

{risk_network-0.0.13b4 → risk_network-0.0.14}/src/risk/__init__.py
@@ -5,6 +5,7 @@ risk
  RISK: Regional Inference of Significant Kinships
  """

- from risk.risk import RISK
+ from ._risk import RISK

- __version__ = "0.0.13-beta.4"
+ __all__ = ["RISK"]
+ __version__ = "0.0.14"
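
Editor's note: the hunk above moves the implementation from `risk/risk.py` to the private module `risk/_risk.py` while re-exporting `RISK` and adding `__all__`, so the public import path is unchanged. A minimal sketch of what callers can rely on after this change (the `print` line is only illustrative):

```python
import risk

# The RISK entry point is re-exported at the package root, so user code keeps
# importing it the same way even though the backing module is now private.
from risk import RISK

print(risk.__version__)  # "0.0.14" in this release
```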

risk_network-0.0.14/src/risk/_annotation/__init__.py
@@ -0,0 +1,10 @@
+ """
+ risk/_annotation
+ ~~~~~~~~~~~~~~~~
+ """
+
+ from ._annotation import (
+ define_top_annotation,
+ get_weighted_description,
+ )
+ from ._io import AnnotationHandler

risk_network-0.0.13b4/src/risk/annotation/annotation.py → risk_network-0.0.14/src/risk/_annotation/_annotation.py
@@ -1,6 +1,6 @@
  """
- risk/annotation/annotation
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
+ risk/_annotation/_annotation
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
  """

  import re
@@ -14,12 +14,13 @@ import pandas as pd
  from nltk.tokenize import word_tokenize
  from scipy.sparse import coo_matrix

- from risk.annotation.nltk_setup import setup_nltk_resources
- from risk.log import logger
+ from .._log import logger
+ from ._nltk_setup import setup_nltk_resources


  def initialize_nltk():
- """Initialize all required NLTK components."""
+ """
+ Initialize all required NLTK components."""
  setup_nltk_resources()

  # After resources are available, initialize the components
@@ -41,7 +42,8 @@ def load_annotation(
  min_nodes_per_term: int = 1,
  max_nodes_per_term: int = 10_000,
  ) -> Dict[str, Any]:
- """Convert annotation input to a sparse matrix and reindex based on the network's node labels.
+ """
+ Convert annotation input to a sparse matrix and reindex based on the network's node labels.

  Args:
  network (nx.Graph): The network graph.
@@ -127,7 +129,8 @@ def define_top_annotation(
  min_cluster_size: int = 5,
  max_cluster_size: int = 1000,
  ) -> pd.DataFrame:
- """Define top annotations based on neighborhood significance sums and binary significance matrix.
+ """
+ Define top annotations based on neighborhood significance sums and binary significance matrix.

  Args:
  network (NetworkX graph): The network graph.
@@ -218,7 +221,8 @@


  def get_weighted_description(words_column: pd.Series, scores_column: pd.Series) -> str:
- """Generate a weighted description from words and their corresponding scores,
+ """
+ Generate a weighted description from words and their corresponding scores,
  using improved weighting logic with normalization, lemmatization, and aggregation.

  Args:
@@ -286,7 +290,8 @@ def get_weighted_description(words_column: pd.Series, scores_column: pd.Series)


  def _simplify_word_list(words: List[str], threshold: float = 0.80) -> List[str]:
- """Filter out words that are too similar based on the Jaccard index,
+ """
+ Filter out words that are too similar based on the Jaccard index,
  keeping the word with the higher aggregated count.

  Args:
@@ -326,7 +331,8 @@ def _simplify_word_list(words: List[str], threshold: float = 0.80) -> List[str]:


  def _calculate_jaccard_index(set1: Set[Any], set2: Set[Any]) -> float:
- """Calculate the Jaccard index between two sets.
+ """
+ Calculate the Jaccard index between two sets.

  Args:
  set1 (Set[Any]): The first set.
@@ -341,7 +347,8 @@ def _calculate_jaccard_index(set1: Set[Any], set2: Set[Any]) -> float:


  def _generate_coherent_description(words: List[str]) -> str:
- """Generate a coherent description from a list of words.
+ """
+ Generate a coherent description from a list of words.

  If there is only one unique entry, return it directly.
  Otherwise, order the words by frequency and join them into a single string.
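
Editor's note: for readers skimming the `_simplify_word_list` and `_calculate_jaccard_index` hunks above, the Jaccard index is simply the size of the intersection divided by the size of the union of two sets. A standalone sketch of that formula; the exact sets RISK builds when deduplicating words are not shown in this diff:

```python
from typing import Any, Set


def jaccard_index(set1: Set[Any], set2: Set[Any]) -> float:
    """Return len(intersection) / len(union), treating two empty sets as 0.0."""
    union = set1 | set2
    return len(set1 & set2) / len(union) if union else 0.0


# Two phrases sharing one of three distinct words score 1/3, well below the
# default 0.80 similarity threshold used by _simplify_word_list.
print(jaccard_index({"ribosome", "assembly"}, {"ribosome", "biogenesis"}))  # 0.333...
```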

risk_network-0.0.13b4/src/risk/annotation/io.py → risk_network-0.0.14/src/risk/_annotation/_io.py
@@ -1,6 +1,6 @@
  """
- risk/annotation/io
- ~~~~~~~~~~~~~~~~~~
+ risk/_annotation/_io
+ ~~~~~~~~~~~~~~~~~~~~
  """

  import json
@@ -9,14 +9,15 @@ from typing import Any, Dict
  import networkx as nx
  import pandas as pd

- from risk.annotation.annotation import load_annotation
- from risk.log import log_header, logger, params
+ from .._log import log_header, logger, params
+ from ._annotation import load_annotation


- class AnnotationIO:
- """Handles the loading and exporting of annotation in various file formats.
+ class AnnotationHandler:
+ """
+ Handles the loading and exporting of annotation in various file formats.

- The AnnotationIO class provides methods to load annotation from different file types (JSON, CSV, Excel, etc.)
+ The AnnotationHandler class provides methods to load annotation from different file types (JSON, CSV, Excel, etc.)
  and to export parameter data to various formats like JSON, CSV, and text files.
  """

@@ -27,7 +28,8 @@ class AnnotationIO:
  min_nodes_per_term: int = 1,
  max_nodes_per_term: int = 10_000,
  ) -> Dict[str, Any]:
- """Load annotation from a JSON file and convert them to a DataFrame.
+ """
+ Load annotation from a JSON file and convert them to a DataFrame.

  Args:
  network (NetworkX graph): The network to which the annotation is related.
@@ -67,7 +69,8 @@
  min_nodes_per_term: int = 1,
  max_nodes_per_term: int = 10_000,
  ) -> Dict[str, Any]:
- """Load annotation from an Excel file and associate them with the network.
+ """
+ Load annotation from an Excel file and associate them with the network.

  Args:
  network (nx.Graph): The NetworkX graph to which the annotation is related.
@@ -116,7 +119,8 @@
  min_nodes_per_term: int = 1,
  max_nodes_per_term: int = 10_000,
  ) -> Dict[str, Any]:
- """Load annotation from a CSV file and associate them with the network.
+ """
+ Load annotation from a CSV file and associate them with the network.

  Args:
  network (nx.Graph): The NetworkX graph to which the annotation is related.
@@ -160,7 +164,8 @@
  min_nodes_per_term: int = 1,
  max_nodes_per_term: int = 10_000,
  ) -> Dict[str, Any]:
- """Load annotation from a TSV file and associate them with the network.
+ """
+ Load annotation from a TSV file and associate them with the network.

  Args:
  network (nx.Graph): The NetworkX graph to which the annotation is related.
@@ -201,7 +206,8 @@
  min_nodes_per_term: int = 1,
  max_nodes_per_term: int = 10_000,
  ) -> Dict[str, Any]:
- """Load annotation from a provided dictionary and convert them to a dictionary annotation.
+ """
+ Load annotation from a provided dictionary and convert them to a dictionary annotation.

  Args:
  network (NetworkX graph): The network to which the annotation is related.
@@ -244,7 +250,8 @@
  delimiter: str = ",",
  nodes_delimiter: str = ";",
  ) -> Dict[str, Any]:
- """Load annotation from a CSV or TSV file and convert them to a dictionary.
+ """
+ Load annotation from a CSV or TSV file and convert them to a dictionary.

  Args:
  filepath (str): Path to the annotation file.
@@ -267,7 +274,8 @@
  return label_node_dict

  def _log_loading_annotation(self, filetype: str, filepath: str = "") -> None:
- """Log the loading of annotation files.
+ """
+ Log the loading of annotation files.

  Args:
  filetype (str): The type of the file being loaded (e.g., 'Cytoscape').
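
Editor's note: the `AnnotationIO` to `AnnotationHandler` rename above does not change what the loaders consume. Per the README, annotations are term-to-gene membership tables supplied as JSON, CSV, TSV, Excel, or Python dictionaries. A hypothetical dictionary of that general shape (term names and gene labels are invented for illustration; the exact schema is not spelled out in this diff):

```python
# Hypothetical term-to-gene membership table: keys are annotation terms and
# values list the network node labels they cover. Shape is illustrative only.
annotation_dict = {
    "ribosomal subunit assembly": ["RPL1", "RPL2", "RPS3"],
    "mitochondrial organization": ["TOM20", "TIM23", "TIM50"],
}
```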

risk_network-0.0.13b4/src/risk/annotation/nltk_setup.py → risk_network-0.0.14/src/risk/_annotation/_nltk_setup.py
@@ -1,6 +1,6 @@
  """
- risk/annotation/nltk_setup
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
+ risk/_annotation/_nltk_setup
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
  """

  import os
@@ -11,11 +11,12 @@ import nltk
  from nltk.data import find
  from nltk.data import path as nltk_data_path

- from risk.log import logger
+ from .._log import logger


  def setup_nltk_resources(required_resources: Optional[List[Tuple[str, str]]] = None) -> None:
- """Ensures all required NLTK resources are available and properly extracted.
+ """
+ Ensures all required NLTK resources are available and properly extracted.
  Uses NLTK's default paths and mechanisms.

  Args:
@@ -47,7 +48,8 @@ def setup_nltk_resources(required_resources: Optional[List[Tuple[str, str]]] = N


  def verify_and_extract_if_needed(resource_path: str, package_name: str) -> None:
- """Verifies if the resource is properly extracted and extracts if needed. Respects
+ """
+ Verifies if the resource is properly extracted and extracts if needed. Respects
  NLTK's directory structure where the extracted content should be in the same directory
  as the zip file.

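
Editor's note: the `_nltk_setup` hunks above only adjust import paths and docstring layout; the underlying job is still to make sure required NLTK resources exist before tokenization. A hedged sketch of that general check-then-download pattern using NLTK's public API ("punkt" is only an illustrative resource; the diff does not list which resources RISK actually requires):

```python
import nltk
from nltk.data import find

# Look the resource up on NLTK's search path and fetch it only when missing,
# the same "ensure available, then use" idea behind setup_nltk_resources().
try:
    find("tokenizers/punkt")
except LookupError:
    nltk.download("punkt")
```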

risk_network-0.0.14/src/risk/_log/__init__.py
@@ -0,0 +1,11 @@
+ """
+ risk/_log
+ ~~~~~~~~~
+ """
+
+ from ._console import log_header, logger, set_global_verbosity
+ from ._parameters import Params
+
+ # Initialize the global parameters logger
+ params = Params()
+ params.initialize()

risk_network-0.0.13b4/src/risk/log/console.py → risk_network-0.0.14/src/risk/_log/_console.py
@@ -1,13 +1,14 @@
  """
- risk/log/console
- ~~~~~~~~~~~~~~~~
+ risk/_log/_console
+ ~~~~~~~~~~~~~~~~~~
  """

  import logging


  def in_jupyter():
- """Check if the code is running in a Jupyter notebook environment.
+ """
+ Check if the code is running in a Jupyter notebook environment.

  Returns:
  bool: True if running in a Jupyter notebook or QtConsole, False otherwise.
@@ -26,7 +27,8 @@

  # Define the MockLogger class to replicate logging behavior with print statements in Jupyter
  class MockLogger:
- """MockLogger: A lightweight logger replacement using print statements in Jupyter.
+ """
+ MockLogger: A lightweight logger replacement using print statements in Jupyter.

  The MockLogger class replicates the behavior of a standard logger using print statements
  to display messages. This is primarily used in a Jupyter environment to show outputs
@@ -35,7 +37,8 @@ class MockLogger:
  """

  def __init__(self, verbose: bool = True):
- """Initialize the MockLogger with verbosity settings.
+ """
+ Initialize the MockLogger with verbosity settings.

  Args:
  verbose (bool): If True, display all log messages (info, debug, warning).
@@ -44,7 +47,8 @@ class MockLogger:
  self.verbose = verbose

  def info(self, message: str) -> None:
- """Display an informational message.
+ """
+ Display an informational message.

  Args:
  message (str): The informational message to be printed.
@@ -53,7 +57,8 @@ class MockLogger:
  print(message)

  def debug(self, message: str) -> None:
- """Display a debug message.
+ """
+ Display a debug message.

  Args:
  message (str): The debug message to be printed.
@@ -62,7 +67,8 @@ class MockLogger:
  print(message)

  def warning(self, message: str) -> None:
- """Display a warning message.
+ """
+ Display a warning message.

  Args:
  message (str): The warning message to be printed.
@@ -70,7 +76,8 @@ class MockLogger:
  print(message)

  def error(self, message: str) -> None:
- """Display an error message.
+ """
+ Display an error message.

  Args:
  message (str): The error message to be printed.
@@ -78,7 +85,8 @@ class MockLogger:
  print(message)

  def setLevel(self, level: int) -> None:
- """Adjust verbosity based on the logging level.
+ """
+ Adjust verbosity based on the logging level.

  Args:
  level (int): Logging level to control message display.
@@ -108,7 +116,8 @@ else:


  def set_global_verbosity(verbose):
- """Set the global verbosity level for the logger.
+ """
+ Set the global verbosity level for the logger.

  Args:
  verbose (bool): Whether to display all log messages (True) or only error messages (False).
@@ -130,7 +139,8 @@


  def log_header(input_string: str) -> None:
- """Log the input string as a header with a line of dashes above and below it.
+ """
+ Log the input string as a header with a line of dashes above and below it.

  Args:
  input_string (str): The string to be printed as a header.
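
Editor's note: taken together, the `_console` hunks above keep the same public helpers under the new private module. A minimal usage sketch, assuming the re-exports shown in `risk/_log/__init__.py` (the message strings are illustrative):

```python
from risk._log import log_header, logger, set_global_verbosity

set_global_verbosity(True)          # True: show info/debug/warning; False: errors only
log_header("Loading annotation")    # prints the title with a line of dashes above and below
logger.info("Annotation loaded")    # routed to MockLogger print statements inside Jupyter
```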

risk_network-0.0.13b4/src/risk/log/parameters.py → risk_network-0.0.14/src/risk/_log/_parameters.py
@@ -1,6 +1,6 @@
  """
- risk/log/parameters
- ~~~~~~~~~~~~~~~~~~~
+ risk/_log/_parameters
+ ~~~~~~~~~~~~~~~~~~~~~
  """

  import csv
@@ -11,14 +11,15 @@ from typing import Any, Dict

  import numpy as np

- from risk.log.console import log_header, logger
+ from ._console import log_header, logger

  # Suppress all warnings - this is to resolve warnings from multiprocessing
  warnings.filterwarnings("ignore")


  class Params:
- """Handles the storage and logging of various parameters for network analysis.
+ """
+ Handles the storage and logging of various parameters for network analysis.

  The Params class provides methods to log parameters related to different components of the analysis,
  such as the network, annotation, neighborhoods, graph, and plotter settings. It also stores
@@ -39,7 +40,8 @@ class Params:
  self.plotter = {}

  def log_network(self, **kwargs) -> None:
- """Log network-related parameters.
+ """
+ Log network-related parameters.

  Args:
  **kwargs: Network parameters to log.
@@ -47,7 +49,8 @@ class Params:
  self.network = {**self.network, **kwargs}

  def log_annotation(self, **kwargs) -> None:
- """Log annotation-related parameters.
+ """
+ Log annotation-related parameters.

  Args:
  **kwargs: Annotation parameters to log.
@@ -55,7 +58,8 @@ class Params:
  self.annotation = {**self.annotation, **kwargs}

  def log_neighborhoods(self, **kwargs) -> None:
- """Log neighborhood-related parameters.
+ """
+ Log neighborhood-related parameters.

  Args:
  **kwargs: Neighborhood parameters to log.
@@ -63,7 +67,8 @@ class Params:
  self.neighborhoods = {**self.neighborhoods, **kwargs}

  def log_graph(self, **kwargs) -> None:
- """Log graph-related parameters.
+ """
+ Log graph-related parameters.

  Args:
  **kwargs: Graph parameters to log.
@@ -71,7 +76,8 @@ class Params:
  self.graph = {**self.graph, **kwargs}

  def log_plotter(self, **kwargs) -> None:
- """Log plotter-related parameters.
+ """
+ Log plotter-related parameters.

  Args:
  **kwargs: Plotter parameters to log.
@@ -79,7 +85,8 @@ class Params:
  self.plotter = {**self.plotter, **kwargs}

  def to_csv(self, filepath: str) -> None:
- """Export the parameters to a CSV file.
+ """
+ Export the parameters to a CSV file.

  Args:
  filepath (str): The path where the CSV file will be saved.
@@ -102,7 +109,8 @@ class Params:
  logger.info(f"Parameters exported to CSV file: {filepath}")

  def to_json(self, filepath: str) -> None:
- """Export the parameters to a JSON file.
+ """
+ Export the parameters to a JSON file.

  Args:
  filepath (str): The path where the JSON file will be saved.
@@ -113,7 +121,8 @@ class Params:
  logger.info(f"Parameters exported to JSON file: {filepath}")

  def to_txt(self, filepath: str) -> None:
- """Export the parameters to a text file.
+ """
+ Export the parameters to a text file.

  Args:
  filepath (str): The path where the text file will be saved.
@@ -131,7 +140,8 @@ class Params:
  logger.info(f"Parameters exported to text file: {filepath}")

  def load(self) -> Dict[str, Any]:
- """Load and process various parameters, converting any np.ndarray values to lists.
+ """
+ Load and process various parameters, converting any np.ndarray values to lists.

  Returns:
  Dict[str, Any]: A dictionary containing the processed parameters.
@@ -149,7 +159,8 @@ class Params:
  )

  def _convert_ndarray_to_list(self, d: Dict[str, Any]) -> Dict[str, Any]:
- """Recursively convert all np.ndarray values in the dictionary to lists.
+ """
+ Recursively convert all np.ndarray values in the dictionary to lists.

  Args:
  d (Dict[str, Any]): The dictionary to process.
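
Editor's note: the `Params` hunks above are docstring-only, so the behavior stays as documented: each `log_*` call merges keyword arguments into its component dictionary, and the `to_*` methods write the accumulated parameters to disk. A short sketch against the module-level `params` instance created in `risk/_log/__init__.py`; the keyword arguments here are illustrative, not a required schema:

```python
from risk._log import params

# Each log_* call merges kwargs into the matching dict,
# e.g. self.network = {**self.network, **kwargs}.
params.log_network(source="yeast_ppi")   # illustrative keyword arguments
params.log_annotation(filetype="JSON")
params.to_json("parameters.json")        # logs: Parameters exported to JSON file: parameters.json
params.to_csv("parameters.csv")          # logs: Parameters exported to CSV file: parameters.csv
```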