put-asunder 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- put_asunder-0.1.0/LICENSE +28 -0
- put_asunder-0.1.0/PKG-INFO +183 -0
- put_asunder-0.1.0/README.md +135 -0
- put_asunder-0.1.0/asunder/__init__.py +32 -0
- put_asunder-0.1.0/asunder/algorithms/__init__.py +22 -0
- put_asunder-0.1.0/asunder/algorithms/community.py +285 -0
- put_asunder-0.1.0/asunder/algorithms/core_periphery.py +541 -0
- put_asunder-0.1.0/asunder/algorithms/louvain_modified.py +739 -0
- put_asunder-0.1.0/asunder/algorithms/signed_louvain/__init__.py +9 -0
- put_asunder-0.1.0/asunder/algorithms/signed_louvain/community_detection.py +703 -0
- put_asunder-0.1.0/asunder/algorithms/signed_louvain/util.py +53 -0
- put_asunder-0.1.0/asunder/algorithms/spectral.py +277 -0
- put_asunder-0.1.0/asunder/case_studies/__init__.py +6 -0
- put_asunder-0.1.0/asunder/case_studies/circle_cutting.py +126 -0
- put_asunder-0.1.0/asunder/case_studies/cpcong.py +109 -0
- put_asunder-0.1.0/asunder/column_generation/__init__.py +18 -0
- put_asunder-0.1.0/asunder/column_generation/decomposition.py +275 -0
- put_asunder-0.1.0/asunder/column_generation/master.py +146 -0
- put_asunder-0.1.0/asunder/column_generation/subproblem.py +227 -0
- put_asunder-0.1.0/asunder/config.py +35 -0
- put_asunder-0.1.0/asunder/evaluation/__init__.py +6 -0
- put_asunder-0.1.0/asunder/evaluation/metrics.py +169 -0
- put_asunder-0.1.0/asunder/evaluation/runner.py +178 -0
- put_asunder-0.1.0/asunder/legacy/__init__.py +5 -0
- put_asunder-0.1.0/asunder/legacy/notebook_compat.py +18 -0
- put_asunder-0.1.0/asunder/orchestrator.py +83 -0
- put_asunder-0.1.0/asunder/solvers.py +38 -0
- put_asunder-0.1.0/asunder/types.py +59 -0
- put_asunder-0.1.0/asunder/utils/__init__.py +27 -0
- put_asunder-0.1.0/asunder/utils/graph.py +302 -0
- put_asunder-0.1.0/asunder/utils/partition_generation.py +433 -0
- put_asunder-0.1.0/asunder/visualization/__init__.py +16 -0
- put_asunder-0.1.0/asunder/visualization/graphs.py +186 -0
- put_asunder-0.1.0/asunder/visualization/matrices.py +57 -0
- put_asunder-0.1.0/put_asunder.egg-info/PKG-INFO +183 -0
- put_asunder-0.1.0/put_asunder.egg-info/SOURCES.txt +39 -0
- put_asunder-0.1.0/put_asunder.egg-info/dependency_links.txt +1 -0
- put_asunder-0.1.0/put_asunder.egg-info/requires.txt +32 -0
- put_asunder-0.1.0/put_asunder.egg-info/top_level.txt +1 -0
- put_asunder-0.1.0/pyproject.toml +77 -0
- put_asunder-0.1.0/setup.cfg +4 -0
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
BSD 3-Clause License for Asunder
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026, Allman Process Systems Research Team, University of Michigan
|
|
4
|
+
|
|
5
|
+
Redistribution and use in source and binary forms, with or without
|
|
6
|
+
modification, are permitted provided that the following conditions are met:
|
|
7
|
+
|
|
8
|
+
1. Redistributions of source code must retain the above copyright notice, this
|
|
9
|
+
list of conditions and the following disclaimer.
|
|
10
|
+
|
|
11
|
+
2. Redistributions in binary form must reproduce the above copyright notice,
|
|
12
|
+
this list of conditions and the following disclaimer in the documentation
|
|
13
|
+
and/or other materials provided with the distribution.
|
|
14
|
+
|
|
15
|
+
3. Neither the name of the copyright holder nor the names of its
|
|
16
|
+
contributors may be used to endorse or promote products derived from
|
|
17
|
+
this software without specific prior written permission.
|
|
18
|
+
|
|
19
|
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
20
|
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
21
|
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
22
|
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
|
23
|
+
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
24
|
+
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
25
|
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
26
|
+
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
|
27
|
+
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
28
|
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: put-asunder
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Asunder: Constrained Structure Detection on Undirected Graphs.
|
|
5
|
+
Author-email: Fortune Adekogbe <fortunea@umich.edu>, Allman Group <allmanaa@umich.edu>
|
|
6
|
+
License-Expression: BSD-3-Clause
|
|
7
|
+
Project-URL: Homepage, https://github.com/allman-group/asunder
|
|
8
|
+
Project-URL: Documentation, https://put-asunder.readthedocs.io
|
|
9
|
+
Project-URL: Repository, https://github.com/allman-group/asunder
|
|
10
|
+
Keywords: graph,constrained-clustering,graph-clustering,community-detection,core-periphery,column-generation,constrained-network-structure-detection,branch-and-price
|
|
11
|
+
Classifier: Programming Language :: Python :: 3
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
15
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
16
|
+
Classifier: Intended Audience :: Science/Research
|
|
17
|
+
Requires-Python: <3.14,>=3.10
|
|
18
|
+
Description-Content-Type: text/markdown
|
|
19
|
+
License-File: LICENSE
|
|
20
|
+
Requires-Dist: numpy>=1.26
|
|
21
|
+
Requires-Dist: scipy>=1.13
|
|
22
|
+
Requires-Dist: networkx>=3.2
|
|
23
|
+
Requires-Dist: scikit-learn>=1.5
|
|
24
|
+
Requires-Dist: scikit-network>=0.33
|
|
25
|
+
Requires-Dist: tqdm>=4.66
|
|
26
|
+
Requires-Dist: pyomo>=6.8
|
|
27
|
+
Requires-Dist: gurobipy>=11.0
|
|
28
|
+
Provides-Extra: graph
|
|
29
|
+
Requires-Dist: python-igraph>=0.11; extra == "graph"
|
|
30
|
+
Requires-Dist: leidenalg>=0.10; extra == "graph"
|
|
31
|
+
Provides-Extra: viz
|
|
32
|
+
Requires-Dist: matplotlib>=3.8; extra == "viz"
|
|
33
|
+
Requires-Dist: seaborn>=0.13; extra == "viz"
|
|
34
|
+
Provides-Extra: legacy
|
|
35
|
+
Requires-Dist: cpnet; extra == "legacy"
|
|
36
|
+
Provides-Extra: docs
|
|
37
|
+
Requires-Dist: sphinx>=7.4; extra == "docs"
|
|
38
|
+
Requires-Dist: furo>=2024.8.6; extra == "docs"
|
|
39
|
+
Requires-Dist: myst-parser>=3.0; extra == "docs"
|
|
40
|
+
Requires-Dist: sphinx-autodoc-typehints>=2.3; extra == "docs"
|
|
41
|
+
Provides-Extra: dev
|
|
42
|
+
Requires-Dist: pytest>=8.3; extra == "dev"
|
|
43
|
+
Requires-Dist: pytest-cov>=5.0; extra == "dev"
|
|
44
|
+
Requires-Dist: ruff>=0.7; extra == "dev"
|
|
45
|
+
Requires-Dist: mypy>=1.12; extra == "dev"
|
|
46
|
+
Requires-Dist: pre-commit>=3.8; extra == "dev"
|
|
47
|
+
Dynamic: license-file
|
|
48
|
+
|
|
49
|
+
# Asunder
|
|
50
|
+
|
|
51
|
+
Asunder is a Python package for constrained structure detection on undirected graphs, with workflows centered on column generation and customizable master/subproblem pipelines.
|
|
52
|
+
|
|
53
|
+
Development of Asunder is led by the Allman Group at the University of Michigan.
|
|
54
|
+
|
|
55
|
+
## Install
|
|
56
|
+
|
|
57
|
+
Base install:
|
|
58
|
+
|
|
59
|
+
```bash
|
|
60
|
+
pip install put-asunder
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
Optional extras:
|
|
64
|
+
|
|
65
|
+
```bash
|
|
66
|
+
pip install "put-asunder[graph,viz]"
|
|
67
|
+
```
|
|
68
|
+
|
|
69
|
+
Legacy heuristics (best-effort on Python 3.13):
|
|
70
|
+
|
|
71
|
+
```bash
|
|
72
|
+
pip install "put-asunder[legacy]"
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
## Python Support
|
|
76
|
+
|
|
77
|
+
- Guaranteed: Python 3.10, 3.11, 3.12, 3.13 for core package.
|
|
78
|
+
- Guaranteed: mainstream extras (`graph`, `viz`) on Python 3.10–3.13.
|
|
79
|
+
- Best-effort: `legacy` extra on Python 3.13.
|
|
80
|
+
|
|
81
|
+
## Quickstart
|
|
82
|
+
|
|
83
|
+
```python
|
|
84
|
+
import numpy as np
|
|
85
|
+
from asunder import CSDDecomposition, CSDDecompositionConfig
|
|
86
|
+
|
|
87
|
+
A = np.array([
|
|
88
|
+
[0, 1, 1, 0],
|
|
89
|
+
[1, 0, 1, 0],
|
|
90
|
+
[1, 1, 0, 0],
|
|
91
|
+
[0, 0, 0, 0],
|
|
92
|
+
], dtype=float)
|
|
93
|
+
|
|
94
|
+
cfg = CSDDecompositionConfig(
|
|
95
|
+
ifc_params={"generator": lambda N, **_: [np.ones((N, N))], "num": 1, "args": {"N": A.shape[0]}},
|
|
96
|
+
extract_dual=False,
|
|
97
|
+
final_master_solve=False,
|
|
98
|
+
)
|
|
99
|
+
|
|
100
|
+
result = CSDDecomposition(config=cfg).run(A)
|
|
101
|
+
print(result.metadata)
|
|
102
|
+
```
|
|
103
|
+
|
|
104
|
+
## Solver Setup
|
|
105
|
+
|
|
106
|
+
Asunder accepts user-provided solver objects. For Gurobi, `GRB_LICENSE_FILE` is used by your environment. Example:
|
|
107
|
+
|
|
108
|
+
```python
|
|
109
|
+
from asunder import create_solver
|
|
110
|
+
|
|
111
|
+
solver = create_solver("gurobi_direct")
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
## Problem Fit
|
|
115
|
+
|
|
116
|
+
Asunder works well out of the box for optimization problems where coordination or operations are coupled across time and those interactions can be represented as a graph over constraints.
|
|
117
|
+
|
|
118
|
+
Asunder also supports general constrained partitioning beyond these domain examples when requirements can be expressed as must-link and cannot-link constraints.
|
|
119
|
+
|
|
120
|
+
Typical fit signals:
|
|
121
|
+
|
|
122
|
+
- coupling across time periods, units, or resources
|
|
123
|
+
- mixed discrete-continuous structure with meaningful constraint interactions
|
|
124
|
+
- a useful interpretation of must-link/cannot-link or worthy-edge constraints
|
|
125
|
+
- value from multilevel partitioning or core-periphery structure detection
|
|
126
|
+
|
|
127
|
+
Representative domains:
|
|
128
|
+
|
|
129
|
+
- stochastic design and dispatch in energy systems
|
|
130
|
+
- scheduling and resource allocation in healthcare systems
|
|
131
|
+
- planning, routing, and location in supply chain and logistics
|
|
132
|
+
- network configuration and resource management in telecommunications
|
|
133
|
+
|
|
134
|
+
For a fuller guide on where default workflows are sufficient vs where customization helps, see `docs/problem_fit.rst`.
|
|
135
|
+
|
|
136
|
+
## Constraint Graph Compatibility
|
|
137
|
+
|
|
138
|
+
For the built-in case-study evaluation workflows (`run_evaluation`), Asunder expects a constraint-graph pattern consistent with the provided case studies.
|
|
139
|
+
|
|
140
|
+
Required structure for `run_evaluation`-style workflows:
|
|
141
|
+
|
|
142
|
+
- undirected graph (typically `networkx.Graph`)
|
|
143
|
+
- node attribute `constraint` (string tag used for ground-truth and role grouping)
|
|
144
|
+
- edge attribute `var_type` with values `"integer"` or `"continuous"`
|
|
145
|
+
|
|
146
|
+
Commonly present (recommended) attributes:
|
|
147
|
+
|
|
148
|
+
- node attribute `type` (for example `"constraint"`)
|
|
149
|
+
- node attribute `details` (metadata dict)
|
|
150
|
+
- edge attributes `weight`, `variables`, `var_types`
|
|
151
|
+
|
|
152
|
+
How these are used:
|
|
153
|
+
|
|
154
|
+
- `constraint` identifies core/nonlinear tags in built-in case studies
|
|
155
|
+
- `var_type` determines candidate edge sets for CP and CD_Refine paths
|
|
156
|
+
|
|
157
|
+
If you are not using `run_evaluation` and instead calling decomposition APIs directly, you can work from an adjacency matrix plus explicit `must_link`, `cannot_link`, and optional `worthy_edges`.
|
|
158
|
+
|
|
159
|
+
## Customization Points
|
|
160
|
+
|
|
161
|
+
For custom problems, typical extension points are:
|
|
162
|
+
|
|
163
|
+
1. Initial feasible partition generator.
|
|
164
|
+
2. `solve_master_problem` replacement.
|
|
165
|
+
3. Heuristic or ILP subproblem replacement.
|
|
166
|
+
4. Optional partition refinement stage.
|
|
167
|
+
|
|
168
|
+
## Examples
|
|
169
|
+
|
|
170
|
+
- Nonlinear B&P-style decomposition: `examples/nonlinear_bp.py`
|
|
171
|
+
- Custom subproblem wiring: `examples/custom_subproblem.py`
|
|
172
|
+
|
|
173
|
+
## Documentation
|
|
174
|
+
|
|
175
|
+
Sphinx docs are scaffolded in `docs/` and intended for Read the Docs deployment.
|
|
176
|
+
|
|
177
|
+
## References
|
|
178
|
+
|
|
179
|
+
Asunder integrates or wraps methods from:
|
|
180
|
+
|
|
181
|
+
- python-igraph / leidenalg
|
|
182
|
+
- scikit-network
|
|
183
|
+
- signed-louvain style algorithms
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
# Asunder
|
|
2
|
+
|
|
3
|
+
Asunder is a Python package for constrained structure detection on undirected graphs, with workflows centered on column generation and customizable master/subproblem pipelines.
|
|
4
|
+
|
|
5
|
+
Development of Asunder is led by the Allman Group at the University of Michigan.
|
|
6
|
+
|
|
7
|
+
## Install
|
|
8
|
+
|
|
9
|
+
Base install:
|
|
10
|
+
|
|
11
|
+
```bash
|
|
12
|
+
pip install put-asunder
|
|
13
|
+
```
|
|
14
|
+
|
|
15
|
+
Optional extras:
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
pip install "put-asunder[graph,viz]"
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
Legacy heuristics (best-effort on Python 3.13):
|
|
22
|
+
|
|
23
|
+
```bash
|
|
24
|
+
pip install "put-asunder[legacy]"
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
## Python Support
|
|
28
|
+
|
|
29
|
+
- Guaranteed: Python 3.10, 3.11, 3.12, 3.13 for core package.
|
|
30
|
+
- Guaranteed: mainstream extras (`graph`, `viz`) on Python 3.10–3.13.
|
|
31
|
+
- Best-effort: `legacy` extra on Python 3.13.
|
|
32
|
+
|
|
33
|
+
## Quickstart
|
|
34
|
+
|
|
35
|
+
```python
|
|
36
|
+
import numpy as np
|
|
37
|
+
from asunder import CSDDecomposition, CSDDecompositionConfig
|
|
38
|
+
|
|
39
|
+
A = np.array([
|
|
40
|
+
[0, 1, 1, 0],
|
|
41
|
+
[1, 0, 1, 0],
|
|
42
|
+
[1, 1, 0, 0],
|
|
43
|
+
[0, 0, 0, 0],
|
|
44
|
+
], dtype=float)
|
|
45
|
+
|
|
46
|
+
cfg = CSDDecompositionConfig(
|
|
47
|
+
ifc_params={"generator": lambda N, **_: [np.ones((N, N))], "num": 1, "args": {"N": A.shape[0]}},
|
|
48
|
+
extract_dual=False,
|
|
49
|
+
final_master_solve=False,
|
|
50
|
+
)
|
|
51
|
+
|
|
52
|
+
result = CSDDecomposition(config=cfg).run(A)
|
|
53
|
+
print(result.metadata)
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
## Solver Setup
|
|
57
|
+
|
|
58
|
+
Asunder accepts user-provided solver objects. For Gurobi, `GRB_LICENSE_FILE` is used by your environment. Example:
|
|
59
|
+
|
|
60
|
+
```python
|
|
61
|
+
from asunder import create_solver
|
|
62
|
+
|
|
63
|
+
solver = create_solver("gurobi_direct")
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
## Problem Fit
|
|
67
|
+
|
|
68
|
+
Asunder works well out of the box for optimization problems where coordination or operations are coupled across time and those interactions can be represented as a graph over constraints.
|
|
69
|
+
|
|
70
|
+
Asunder also supports general constrained partitioning beyond these domain examples when requirements can be expressed as must-link and cannot-link constraints.
|
|
71
|
+
|
|
72
|
+
Typical fit signals:
|
|
73
|
+
|
|
74
|
+
- coupling across time periods, units, or resources
|
|
75
|
+
- mixed discrete-continuous structure with meaningful constraint interactions
|
|
76
|
+
- a useful interpretation of must-link/cannot-link or worthy-edge constraints
|
|
77
|
+
- value from multilevel partitioning or core-periphery structure detection
|
|
78
|
+
|
|
79
|
+
Representative domains:
|
|
80
|
+
|
|
81
|
+
- stochastic design and dispatch in energy systems
|
|
82
|
+
- scheduling and resource allocation in healthcare systems
|
|
83
|
+
- planning, routing, and location in supply chain and logistics
|
|
84
|
+
- network configuration and resource management in telecommunications
|
|
85
|
+
|
|
86
|
+
For a fuller guide on where default workflows are sufficient vs where customization helps, see `docs/problem_fit.rst`.
|
|
87
|
+
|
|
88
|
+
## Constraint Graph Compatibility
|
|
89
|
+
|
|
90
|
+
For the built-in case-study evaluation workflows (`run_evaluation`), Asunder expects a constraint-graph pattern consistent with the provided case studies.
|
|
91
|
+
|
|
92
|
+
Required structure for `run_evaluation`-style workflows:
|
|
93
|
+
|
|
94
|
+
- undirected graph (typically `networkx.Graph`)
|
|
95
|
+
- node attribute `constraint` (string tag used for ground-truth and role grouping)
|
|
96
|
+
- edge attribute `var_type` with values `"integer"` or `"continuous"`
|
|
97
|
+
|
|
98
|
+
Commonly present (recommended) attributes:
|
|
99
|
+
|
|
100
|
+
- node attribute `type` (for example `"constraint"`)
|
|
101
|
+
- node attribute `details` (metadata dict)
|
|
102
|
+
- edge attributes `weight`, `variables`, `var_types`
|
|
103
|
+
|
|
104
|
+
How these are used:
|
|
105
|
+
|
|
106
|
+
- `constraint` identifies core/nonlinear tags in built-in case studies
|
|
107
|
+
- `var_type` determines candidate edge sets for CP and CD_Refine paths
|
|
108
|
+
|
|
109
|
+
If you are not using `run_evaluation` and instead calling decomposition APIs directly, you can work from an adjacency matrix plus explicit `must_link`, `cannot_link`, and optional `worthy_edges`.
|
|
110
|
+
|
|
111
|
+
## Customization Points
|
|
112
|
+
|
|
113
|
+
For custom problems, typical extension points are:
|
|
114
|
+
|
|
115
|
+
1. Initial feasible partition generator.
|
|
116
|
+
2. `solve_master_problem` replacement.
|
|
117
|
+
3. Heuristic or ILP subproblem replacement.
|
|
118
|
+
4. Optional partition refinement stage.
|
|
119
|
+
|
|
120
|
+
## Examples
|
|
121
|
+
|
|
122
|
+
- Nonlinear B&P-style decomposition: `examples/nonlinear_bp.py`
|
|
123
|
+
- Custom subproblem wiring: `examples/custom_subproblem.py`
|
|
124
|
+
|
|
125
|
+
## Documentation
|
|
126
|
+
|
|
127
|
+
Sphinx docs are scaffolded in `docs/` and intended for Read the Docs deployment.
|
|
128
|
+
|
|
129
|
+
## References
|
|
130
|
+
|
|
131
|
+
Asunder integrates or wraps methods from:
|
|
132
|
+
|
|
133
|
+
- python-igraph / leidenalg
|
|
134
|
+
- scikit-network
|
|
135
|
+
- signed-louvain style algorithms
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
"""Asunder: Constrained structure detection on undirected graphs."""
|
|
2
|
+
|
|
3
|
+
from asunder.column_generation.master import solve_master_problem
|
|
4
|
+
from asunder.column_generation.subproblem import solve_subproblem
|
|
5
|
+
from asunder.config import CSDDecompositionConfig
|
|
6
|
+
from asunder.orchestrator import CSDDecomposition, run_csd_decomposition
|
|
7
|
+
from asunder.solvers import create_solver
|
|
8
|
+
from asunder.types import DecompositionResult, IterationRecord
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def run_evaluation(*args, **kwargs):
    """Proxy to :func:`asunder.evaluation.runner.run_evaluation`.

    The runner module is imported lazily so that users who only need the
    decomposition APIs never pay for — or require — the optional evaluation
    dependencies at package import time.
    """
    from asunder.evaluation.runner import run_evaluation as _impl

    return _impl(*args, **kwargs)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
__all__ = [
|
|
23
|
+
"CSDDecomposition",
|
|
24
|
+
"CSDDecompositionConfig",
|
|
25
|
+
"DecompositionResult",
|
|
26
|
+
"IterationRecord",
|
|
27
|
+
"create_solver",
|
|
28
|
+
"run_csd_decomposition",
|
|
29
|
+
"run_evaluation",
|
|
30
|
+
"solve_master_problem",
|
|
31
|
+
"solve_subproblem",
|
|
32
|
+
]
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"""Public algorithm exports for community and core-periphery detection."""
|
|
2
|
+
|
|
3
|
+
from asunder.algorithms.community import run_modularity, run_signed_louvain
|
|
4
|
+
from asunder.algorithms.core_periphery import (
|
|
5
|
+
EnhancedGeneticBE,
|
|
6
|
+
FullContinuousGeneticBE,
|
|
7
|
+
detect_continuous_KL,
|
|
8
|
+
find_core,
|
|
9
|
+
spectral_continuous_cp_detection,
|
|
10
|
+
)
|
|
11
|
+
from asunder.algorithms.louvain_modified import ModifiedLouvain
|
|
12
|
+
|
|
13
|
+
__all__ = [
|
|
14
|
+
"EnhancedGeneticBE",
|
|
15
|
+
"FullContinuousGeneticBE",
|
|
16
|
+
"ModifiedLouvain",
|
|
17
|
+
"detect_continuous_KL",
|
|
18
|
+
"spectral_continuous_cp_detection",
|
|
19
|
+
"find_core",
|
|
20
|
+
"run_modularity",
|
|
21
|
+
"run_signed_louvain",
|
|
22
|
+
]
|
|
@@ -0,0 +1,285 @@
|
|
|
1
|
+
"""Community-detection helpers and wrappers."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import itertools
|
|
6
|
+
|
|
7
|
+
import networkx as nx
|
|
8
|
+
import numpy as np
|
|
9
|
+
from scipy import sparse
|
|
10
|
+
|
|
11
|
+
from asunder.algorithms.signed_louvain import community_detection as cd
|
|
12
|
+
from asunder.algorithms.signed_louvain import util as slouvain_util
|
|
13
|
+
from asunder.utils.graph import (
|
|
14
|
+
group_nodes_by_community,
|
|
15
|
+
partition_matrix_to_vector,
|
|
16
|
+
partition_vector_to_2d_matrix,
|
|
17
|
+
)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _import_sknetwork():
    """Lazily import the scikit-network helpers this module relies on."""
    from sknetwork.clustering import (
        Leiden,
        Louvain,
        PropagationClustering,
        get_modularity,
    )
    from sknetwork.linalg import normalize
    from sknetwork.utils import get_membership

    return Louvain, Leiden, PropagationClustering, get_modularity, normalize, get_membership
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _import_igraph():
    """Lazily import python-igraph (provided by the optional ``graph`` extra)."""
    import igraph

    return igraph
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def _import_leidenalg():
    """Lazily import leidenalg (provided by the optional ``graph`` extra)."""
    import leidenalg

    return leidenalg
|
|
38
|
+
|
|
39
|
+
def labels_to_probabilities(A, labels, p=1):
    """Convert hard labels into row-normalized membership probabilities.

    Parameters
    ----------
    A : array-like or scipy sparse matrix
        Adjacency matrix of the graph (dense or any sparse format).
    labels : array-like of int
        Hard community label for each node.
    p : int, default 1
        Norm order passed to :func:`sknetwork.linalg.normalize`.

    Returns
    -------
    Row-normalized node-to-community membership probabilities
    (sparse, as produced by scikit-network's ``normalize``).
    """
    _, _, _, _, normalize, get_membership = _import_sknetwork()
    if not sparse.isspmatrix_csr(A):
        # csr_matrix(...) accepts dense arrays AND any sparse format directly.
        # The previous np.asarray(A) round-trip produced a 0-d object array
        # for sparse-but-not-CSR inputs (e.g. COO), which broke A @ M.
        A = sparse.csr_matrix(A, dtype=float)
    M = get_membership(labels)
    return normalize(A @ M, p=p)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def probability_to_integer_labels(probabilities, method="threshold", threshold=0.8, verbose=False):
    """Map soft memberships to integer labels using a configurable rule.

    Parameters
    ----------
    probabilities : np.ndarray
        Soft membership matrix (n_nodes x n_communities) or a 1-D confidence
        vector.
    method : {"threshold", "gaussian_mixture", "DBSCAN"}
        Rule used to decide which nodes are low-confidence.
    threshold : float, default 0.8
        Minimum per-node confidence for the "threshold" rule.
    verbose : bool, default False
        Print intermediate values for debugging.

    Returns
    -------
    np.ndarray
        One label per node; ``-1`` marks low-confidence (unassigned) nodes,
        otherwise the argmax community index.

    Raises
    ------
    ValueError
        If ``method`` is not one of the supported rules.
    """
    # Explicit validation instead of `assert`, which is stripped under -O.
    if method not in ("threshold", "gaussian_mixture", "DBSCAN"):
        raise ValueError(f"Invalid method: {method}")

    # Per-node confidence: the max membership per row (or the vector itself).
    if probabilities.ndim == 2:
        p = np.max(probabilities, axis=1).reshape((-1, 1))
    else:
        p = probabilities.reshape((-1, 1))
    # Min-max scale against the confidence range; the epsilon guards against a
    # constant confidence vector, and negatives are clamped to zero.
    scaled_probabilities = (probabilities - p.min()) / (p.max() - p.min() + 1e-12)
    scaled_probabilities[scaled_probabilities < 0] = 0

    if verbose:
        print("Probability values are:\n", p)
    partition = np.zeros(shape=(probabilities.shape[0],))

    if method == "threshold":
        for i in range(probabilities.shape[0]):
            if np.max(probabilities[i]) < threshold:
                partition[i] = -1
            else:
                partition[i] = np.argmax(probabilities[i])
    elif method == "gaussian_mixture":
        # sklearn is imported lazily so the dependency is only required when
        # this rule is actually used (the "threshold" path is pure numpy).
        from sklearn.mixture import GaussianMixture

        gmm = GaussianMixture(n_components=2, random_state=42)
        labels_gmm = gmm.fit_predict(p)
        # The mixture component with the lower mean confidence is treated as
        # the low-confidence cluster.
        core_cluster = np.argmin(gmm.means_)
        if verbose:
            print("Labels from GMM are:\n", labels_gmm)
        for i in range(probabilities.shape[0]):
            if labels_gmm[i] == core_cluster:
                partition[i] = -1
            else:
                partition[i] = np.argmax(probabilities[i])
    else:
        from sklearn.cluster import DBSCAN

        # Use scaled values when confidences are tightly bunched, raw otherwise.
        labels_dbscan = DBSCAN().fit_predict(
            scaled_probabilities if (np.std(np.unique(p)) < 0.25) else probabilities
        )
        # The smallest DBSCAN cluster is treated as the unassigned group.
        v, c = np.unique(labels_dbscan, return_counts=True)
        lv = v[np.argmin(c)]
        for i in range(probabilities.shape[0]):
            if labels_dbscan[i] == lv:
                partition[i] = -1
            else:
                partition[i] = np.argmax(probabilities[i])
    return partition
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def refine_partition_linear_group(
    A, partition, *, p=1, prob_method="threshold", threshold=0.8, verbose=False
):
    """Refine a partition by reassigning low-confidence nodes.

    The partition is flattened to a label vector, converted into soft
    membership probabilities against ``A``, thresholded back to hard labels
    (low-confidence nodes become ``-1``), and re-expanded to matrix form.
    """
    if partition.ndim == 2:
        hard_labels = partition_matrix_to_vector(partition)
    else:
        hard_labels = partition.copy()
    memberships = labels_to_probabilities(A, hard_labels, p=p).toarray()
    relabelled = probability_to_integer_labels(
        memberships, method=prob_method, threshold=threshold, verbose=verbose
    )
    return partition_vector_to_2d_matrix(relabelled)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def best_girvan_newman_partition(G, max_levels=10):
    """Scan up to ``max_levels`` Girvan-Newman splits and keep the best one.

    Returns ``(communities, modularity)``; ``communities`` is ``None`` (with
    modularity ``-1.0``) when the generator yields nothing.
    """
    hierarchy = nx.community.girvan_newman(G)
    winner, winner_score = None, -1.0
    for level in itertools.islice(hierarchy, max_levels):
        candidate = tuple(sorted(group) for group in level)
        score = nx.community.modularity(G, candidate)
        if score > winner_score:
            winner, winner_score = candidate, score
    return winner, winner_score
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
def run_modularity(modified_A, algo="louvain", package="networkx", resolution=1, verbose=False, refine=True, refine_params=None):
    """Run modularity-style community detection and return ``(partition, score)``.

    Parameters
    ----------
    modified_A : np.ndarray
        Dense weighted adjacency matrix.
    algo : {"louvain", "leiden", "greedy", "girvan_newman"}
        Community-detection algorithm to run.
    package : str
        Backend ("networkx", "sknetwork", "igraph", "leidenalg"); only some
        (algo, package) pairs are implemented — others raise
        ``NotImplementedError``.
    resolution : float, default 1
        Modularity resolution parameter, where the backend supports it.
    verbose : bool, default False
        Currently unused in this function.
    refine : bool, default True
        When True, post-process the partition with
        ``refine_params["refine_func"]``.
    refine_params : dict or None
        Must supply ``"refine_func"`` and ``"kwargs"`` when ``refine=True``.

    Returns
    -------
    tuple
        ``(zii, metric)``: an N x N co-membership matrix and the modularity
        score (``None`` on the refine path — see NOTE below).
    """
    assert algo in ["louvain", "leiden", "greedy", "girvan_newman"]
    # The structured dtype makes nx.from_numpy_array attach matrix entries as
    # "weight" edge attributes.
    modG = nx.from_numpy_array(modified_A.astype([("weight", "float")]))
    metric = None

    # The igraph-backed packages share one weighted undirected graph instance.
    if package in {"igraph", "leidenalg"}:
        ig = _import_igraph()
        ig_graph = ig.Graph.Weighted_Adjacency(modified_A.tolist(), mode="UNDIRECTED", attr="weight")
    else:
        ig_graph = None

    if algo == "louvain":
        if package == "networkx":
            communities = nx.community.louvain_communities(modG, weight="weight", resolution=resolution)
        elif package == "sknetwork":
            Louvain, _, _, _, _, _ = _import_sknetwork()
            partition = Louvain().fit_predict(modified_A)
            # Group node indices by their predicted label.
            communities = {}
            for i, val in enumerate(partition):
                communities.setdefault(val, set()).add(int(i))
            communities = communities.values()
        else:
            raise NotImplementedError(f"Invalid package: {package}")
    elif algo == "leiden":
        if package == "leidenalg":
            la = _import_leidenalg()
            # Fixed seed keeps leidenalg runs reproducible.
            partition = la.find_partition(
                ig_graph, la.ModularityVertexPartition, seed=42, weights="weight"
            )
            membership = {}
            for node, comm in zip(ig_graph.vs.indices, partition.membership):
                membership.setdefault(comm, []).append(node)
            communities = list(membership.values())
        elif package == "sknetwork":
            _, Leiden, _, _, _, _ = _import_sknetwork()
            partition = Leiden().fit_predict(modified_A)
            communities = {}
            for i, val in enumerate(partition):
                communities.setdefault(val, set()).add(int(i))
            communities = communities.values()
        elif package == "igraph":
            clustering = ig_graph.community_leiden(
                objective_function="modularity", weights="weight", resolution=resolution
            )
            communities = {}
            for i, val in enumerate(clustering.membership):
                communities.setdefault(val, set()).add(int(i))
            communities = communities.values()
        else:
            raise NotImplementedError(f"Invalid package: {package}")
    elif algo == "greedy":
        if package == "networkx":
            communities = nx.community.greedy_modularity_communities(
                modG, weight="weight", resolution=resolution
            )
        elif package == "igraph":
            clustering = ig_graph.community_fastgreedy(weights="weight").as_clustering()
            communities = {}
            for i, val in enumerate(clustering.membership):
                communities.setdefault(val, set()).add(int(i))
            communities = communities.values()
        else:
            raise NotImplementedError(f"Invalid package: {package}")
    else:
        # girvan_newman: search every split level for the best modularity.
        communities, _ = best_girvan_newman_partition(modG, max_levels=modified_A.shape[0])

    # Flatten the community groups into a per-node label vector.
    oneD_z = np.zeros(shape=(modified_A.shape[0]), dtype=np.int64)
    for i, community in enumerate(communities):
        for node in community:
            oneD_z[node] = i

    if refine:
        if refine_params is None:
            raise ValueError("refine_params is required when refine=True")
        zii = refine_params["refine_func"](A=modified_A, partition=oneD_z, **refine_params["kwargs"])
        _, communities = group_nodes_by_community(np.array(zii))
    else:
        # Co-membership matrix: zii[i, j] == 1 iff i and j share a community.
        zii = np.equal.outer(oneD_z, oneD_z).astype(int)
        # NOTE(review): modularity is computed only on the non-refine path, so
        # refined runs return metric=None (refined partitions may contain -1
        # "unassigned" labels for which modularity is undefined) — confirm
        # this is intended.
        metric = nx.community.modularity(modG, communities)
    return zii, metric
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
def run_lpa(modified_A, refine=True):
    """Run label propagation clustering and return ``(partition, modularity)``.

    With ``refine=True``, nodes whose top membership probability is below 0.8
    are marked ``-1`` (low confidence) before the co-membership matrix is built.
    """
    _, _, PropagationClustering, get_modularity, _, _ = _import_sknetwork()
    clusterer = PropagationClustering()
    if refine:
        proba = clusterer.fit_predict_proba(modified_A)
        partition = np.zeros(shape=(proba.shape[0],))
        for idx in range(proba.shape[0]):
            if np.max(proba[idx]) < 0.8:
                partition[idx] = -1
            else:
                partition[idx] = np.argmax(proba[idx])
    else:
        partition = clusterer.fit_predict(modified_A)

    # Group node indices by label, then relabel communities 0..k-1.
    grouped = {}
    for idx, label in enumerate(partition):
        grouped.setdefault(label, set()).add(int(idx))
    oneD_z = np.zeros(shape=(modified_A.shape[0]), dtype=np.int64)
    for comm_id, members in enumerate(grouped.values()):
        for node in members:
            oneD_z[node] = comm_id
    zii = np.equal.outer(oneD_z, oneD_z).astype(int)
    return zii, get_modularity(modified_A, partition.astype(int))
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
def run_igraph_spinglass(modified_A):
    """Run igraph spinglass community detection and return a partition matrix.

    Uses the "neg" implementation, which tolerates negative edge weights.
    """
    igraph_mod = _import_igraph()
    graph = igraph_mod.Graph.Weighted_Adjacency(modified_A.tolist(), mode="UNDIRECTED", attr="weight")
    clustering = graph.community_spinglass(
        weights="weight",
        implementation="neg",
        lambda_=0.0,
        spins=500,
        start_temp=1.0,
        stop_temp=0.01,
        cool_fact=0.99,
    )
    membership = clustering.membership
    # Co-membership matrix: 1 where two nodes share a community.
    return np.equal.outer(membership, membership).astype(int)
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def run_igraph(modified_A, algo="infomap", resolution=1):
    """Run selected igraph community algorithm and return ``(partition, score)``.

    Supported ``algo`` values: "infomap", "lpa", "multilevel", "voronoi",
    "walktrap". Anything else raises ``NotImplementedError``.
    """
    igraph_mod = _import_igraph()
    graph = igraph_mod.Graph.Weighted_Adjacency(modified_A.tolist(), mode="UNDIRECTED", attr="weight")
    # Dispatch table instead of an if/elif chain; lambdas defer execution so
    # only the selected algorithm runs.
    builders = {
        "infomap": lambda: graph.community_infomap(edge_weights="weight"),
        "lpa": lambda: graph.community_label_propagation(weights="weight"),
        "multilevel": lambda: graph.community_multilevel(weights="weight", resolution=resolution),
        "voronoi": lambda: graph.community_voronoi(weights="weight", radius=resolution),
        "walktrap": lambda: graph.community_walktrap(weights="weight").as_clustering(),
    }
    if algo not in builders:
        raise NotImplementedError("Invalid Igraph Algorithm")
    clustering = builders[algo]()
    membership = clustering.membership
    zii = np.equal.outer(membership, membership).astype(int)
    score = graph.modularity(clustering, weights="weight", resolution=resolution)
    return zii, score
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
def run_signed_louvain(modified_A):
    """Run signed Louvain on positive/negative layers and return ``(partition, score)``.

    The signed adjacency matrix is split into positive and negative subgraphs,
    which are passed to the signed-Louvain driver as two layers with weights
    ``+1`` and ``-1`` respectively.
    """
    num_nodes = modified_A.shape[0]
    # Only the upper triangle is scanned: the graph is undirected.
    rows, cols = np.where(np.triu(modified_A) != 0)
    weighted_edges = [(u, v, modified_A[u, v]) for u, v in zip(rows, cols)]
    signed_graph = slouvain_util.build_nx_graph(num_nodes, weighted_edges)
    pos_layer, neg_layer = slouvain_util.build_subgraphs(signed_graph, weight="weight")
    communities, status = cd.best_partition(
        layers=[pos_layer, neg_layer],
        layer_weights=[1.0, -1.0],
        resolutions=[1.0, 1.0],
        masks=[False, True],
        k=2,
        initial_membership=None,
        weight="weight",
        random_state=None,
        pass_max=40,
        return_dendogram=False,
        silent=True,
    )
    labels = np.zeros(shape=(modified_A.shape[0]), dtype=np.int64)
    for node, community_id in communities.items():
        labels[node] = community_id
    co_membership = np.equal.outer(labels, labels).astype(int)
    return co_membership, status.modularity()
|