spization-1.0.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. spization-1.0.0/LICENSE +0 -0
  2. spization-1.0.0/PKG-INFO +66 -0
  3. spization-1.0.0/README.md +35 -0
  4. spization-1.0.0/pyproject.toml +70 -0
  5. spization-1.0.0/setup.cfg +4 -0
  6. spization-1.0.0/spization/__init__.py +3 -0
  7. spization-1.0.0/spization/__internals/__init__.py +0 -0
  8. spization-1.0.0/spization/__internals/general/__init__.py +53 -0
  9. spization-1.0.0/spization/__internals/graph/__init__.py +27 -0
  10. spization-1.0.0/spization/__internals/graph/add_nodes_and_edges.py +20 -0
  11. spization-1.0.0/spization/__internals/graph/longest_path_lengths_from_source.py +39 -0
  12. spization-1.0.0/spization/__internals/graph/lowest_common_ancestor.py +17 -0
  13. spization-1.0.0/spization/__internals/graph/properties.py +34 -0
  14. spization-1.0.0/spization/__internals/graph/sinks.py +7 -0
  15. spization-1.0.0/spization/__internals/graph/sources.py +7 -0
  16. spization-1.0.0/spization/__internals/graph/strata_sort.py +10 -0
  17. spization-1.0.0/spization/__internals/sp/__init__.py +14 -0
  18. spization-1.0.0/spization/__internals/sp/cbc_decomposition.py +93 -0
  19. spization-1.0.0/spization/__internals/sp/inverse_line_graph.py +63 -0
  20. spization-1.0.0/spization/algorithms/__init__.py +12 -0
  21. spization-1.0.0/spization/algorithms/flexible_sync.py +208 -0
  22. spization-1.0.0/spization/algorithms/naive_strata_sync.py +37 -0
  23. spization-1.0.0/spization/algorithms/pure_node_dup.py +96 -0
  24. spization-1.0.0/spization/algorithms/spanish_strata_sync.py +155 -0
  25. spization-1.0.0/spization/benchmarking/benchmarking.py +252 -0
  26. spization-1.0.0/spization/benchmarking/cost_modelling.py +72 -0
  27. spization-1.0.0/spization/benchmarking/graphs.py +259 -0
  28. spization-1.0.0/spization/modular_decomposition/__init__.py +24 -0
  29. spization-1.0.0/spization/modular_decomposition/directed/directed_md.py +104 -0
  30. spization-1.0.0/spization/modular_decomposition/directed/directed_quotient_graph.py +113 -0
  31. spization-1.0.0/spization/modular_decomposition/directed/objects.py +101 -0
  32. spization-1.0.0/spization/modular_decomposition/through_modular_decomposition.py +53 -0
  33. spization-1.0.0/spization/modular_decomposition/undirected/objects.py +105 -0
  34. spization-1.0.0/spization/modular_decomposition/undirected/undirected_md.py +53 -0
  35. spization-1.0.0/spization/modular_decomposition/undirected/undirected_md_naive.py +55 -0
  36. spization-1.0.0/spization/modular_decomposition/undirected/undirected_quotient_graph.py +112 -0
  37. spization-1.0.0/spization/modular_decomposition/utils.py +74 -0
  38. spization-1.0.0/spization/objects/__init__.py +32 -0
  39. spization-1.0.0/spization/objects/edges.py +4 -0
  40. spization-1.0.0/spization/objects/nodes.py +13 -0
  41. spization-1.0.0/spization/objects/splits.py +140 -0
  42. spization-1.0.0/spization/utils/__init__.py +55 -0
  43. spization-1.0.0/spization/utils/bsp_to_sp.py +25 -0
  44. spization-1.0.0/spization/utils/compositions.py +62 -0
  45. spization-1.0.0/spization/utils/critical_path_cost.py +86 -0
  46. spization-1.0.0/spization/utils/dependencies_are_maintained.py +29 -0
  47. spization-1.0.0/spization/utils/get_ancestors.py +54 -0
  48. spization-1.0.0/spization/utils/get_node_counter.py +20 -0
  49. spization-1.0.0/spization/utils/get_nodes.py +19 -0
  50. spization-1.0.0/spization/utils/has_no_duplicate_nodes.py +6 -0
  51. spization-1.0.0/spization/utils/is_empty.py +7 -0
  52. spization-1.0.0/spization/utils/normalize.py +41 -0
  53. spization-1.0.0/spization/utils/random_sp.py +27 -0
  54. spization-1.0.0/spization/utils/replace_node.py +26 -0
  55. spization-1.0.0/spization/utils/sp_to_bsp.py +43 -0
  56. spization-1.0.0/spization/utils/sp_to_spg.py +26 -0
  57. spization-1.0.0/spization/utils/spg_to_sp.py +116 -0
  58. spization-1.0.0/spization/utils/ttspg_to_spg.py +25 -0
  59. spization-1.0.0/spization/utils/work_cost.py +51 -0
  60. spization-1.0.0/spization.egg-info/PKG-INFO +66 -0
  61. spization-1.0.0/spization.egg-info/SOURCES.txt +63 -0
  62. spization-1.0.0/spization.egg-info/dependency_links.txt +1 -0
  63. spization-1.0.0/spization.egg-info/entry_points.txt +2 -0
  64. spization-1.0.0/spization.egg-info/requires.txt +9 -0
  65. spization-1.0.0/spization.egg-info/top_level.txt +1 -0
spization-1.0.0/LICENSE
File without changes
spization-1.0.0/PKG-INFO
@@ -0,0 +1,66 @@
+ Metadata-Version: 2.4
+ Name: spization
+ Version: 1.0.0
+ Summary: Package for Graph Series-Parallel-ization
+ Author-email: Marsella8 <pietromax.marsella@gmail.com>
+ License: GPL-3.0-or-later
+ Project-URL: Homepage, https://github.com/Marsella8/spization
+ Project-URL: Repository, https://github.com/Marsella8/spization
+ Project-URL: Issues, https://github.com/Marsella8/spization/issues
+ Keywords: graph,series-parallel,dag,modular-decomposition,algorithms
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Intended Audience :: Science/Research
+ Classifier: License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Classifier: Topic :: Scientific/Engineering :: Mathematics
+ Requires-Python: <3.14,>=3.12
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: networkx<4,>=3.3
+ Requires-Dist: multimethod<2,>=1.12
+ Requires-Dist: loguru<1,>=0.7.2
+ Requires-Dist: icecream<3,>=2.1.3
+ Requires-Dist: numpy<3,>=2.1.0
+ Requires-Dist: rich<14,>=13.8.0
+ Requires-Dist: multiset<4,>=3.2.0
+ Requires-Dist: bidict<1,>=0.23.1
+ Requires-Dist: passagemath-graphs<11,>=10.5.43
+ Dynamic: license-file
+
+ ## SP-ization
+
+ A simple Python package for Graph Series-Parallel-ization.
+
+
+ <!-- ## Notes:
+
+ Your SP: no sync nodes, may contain duplicates
+ - Does not have sync nodes; nodes can only be ints. If coming from a graph with duplicates, the nodes are simply deduplicated (so from NodeDup to int).
+ Technically, SP can only hold NODEs, but for simplicity we'll have Node.
+
+ Your graph: sync nodes, cannot contain duplicates
+ - Can have sync nodes or not (and can freely switch between the two)
+ - Can either have no duplicates (so all the nodes are simply int) or have duplicates (in which case they are NodeDups). You can freely switch between graph and SP.
+
+ - Algorithms must take in a DAG that is made up of ints and that is TTSP, and return an SPD of only ints.
+ - In general: all graph utilities should apply to SPG, all SP utilities should apply to SPD. -->
+
+ ## Instructions
+
+ Install dependencies: `uv sync --group dev`
+
+ To enter the venv: `source .venv/bin/activate`
+
+ For testing: `uv run pytest`
+
+ For Nix build verification: `nix build .#spization --no-link`
+
+ For codecov: `uv run pytest --cov=spization --cov-report=term-missing:skip-covered`
+
+ For linting: `uv run ruff check --fix` / `uv run ruff format`
+
+ For type checking: `uv run ty check`
+
+ To run benchmarking: `uv run benchmark`
spization-1.0.0/README.md
@@ -0,0 +1,35 @@
+ ## SP-ization
+
+ A simple Python package for Graph Series-Parallel-ization.
+
+
+ <!-- ## Notes:
+
+ Your SP: no sync nodes, may contain duplicates
+ - Does not have sync nodes; nodes can only be ints. If coming from a graph with duplicates, the nodes are simply deduplicated (so from NodeDup to int).
+ Technically, SP can only hold NODEs, but for simplicity we'll have Node.
+
+ Your graph: sync nodes, cannot contain duplicates
+ - Can have sync nodes or not (and can freely switch between the two)
+ - Can either have no duplicates (so all the nodes are simply int) or have duplicates (in which case they are NodeDups). You can freely switch between graph and SP.
+
+ - Algorithms must take in a DAG that is made up of ints and that is TTSP, and return an SPD of only ints.
+ - In general: all graph utilities should apply to SPG, all SP utilities should apply to SPD. -->
+
+ ## Instructions
+
+ Install dependencies: `uv sync --group dev`
+
+ To enter the venv: `source .venv/bin/activate`
+
+ For testing: `uv run pytest`
+
+ For Nix build verification: `nix build .#spization --no-link`
+
+ For codecov: `uv run pytest --cov=spization --cov-report=term-missing:skip-covered`
+
+ For linting: `uv run ruff check --fix` / `uv run ruff format`
+
+ For type checking: `uv run ty check`
+
+ To run benchmarking: `uv run benchmark`
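The README gives no usage example, only tooling commands. Purely as an illustrative sketch (not taken from the package docs): the import path below is confirmed by `spization/algorithms/__init__.py` later in this diff, but the call signature of `spanish_strata_sync` is an assumption based on the commented notes above (algorithms take a DAG of ints and return an SP decomposition of ints).

```python
# Hypothetical usage sketch: import path is real, call signature is assumed.
import networkx as nx

from spization.algorithms import spanish_strata_sync

# A small single-source, single-sink DAG that is not series-parallel.
g = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3), (1, 2)])

sp = spanish_strata_sync(g)  # assumed: DiGraph -> serial-parallel decomposition
print(sp)
```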
spization-1.0.0/pyproject.toml
@@ -0,0 +1,70 @@
+ [project]
+ name = "spization"
+ version = "1.0.0"
+ description = "Package for Graph Series-Parallel-ization"
+ readme = "README.md"
+ requires-python = ">=3.12,<3.14"
+ license = { text = "GPL-3.0-or-later" }
+ authors = [{ name = "Marsella8", email = "pietromax.marsella@gmail.com" }]
+ keywords = ["graph", "series-parallel", "dag", "modular-decomposition", "algorithms"]
+ classifiers = [
+     "Development Status :: 4 - Beta",
+     "Intended Audience :: Science/Research",
+     "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
+     "Programming Language :: Python :: 3",
+     "Programming Language :: Python :: 3.12",
+     "Programming Language :: Python :: 3.13",
+     "Topic :: Scientific/Engineering :: Mathematics",
+ ]
+ dependencies = [
+     "networkx>=3.3,<4",
+     "multimethod>=1.12,<2",
+     "loguru>=0.7.2,<1",
+     "icecream>=2.1.3,<3",
+     "numpy>=2.1.0,<3",
+     "rich>=13.8.0,<14",
+     "multiset>=3.2.0,<4",
+     "bidict>=0.23.1,<1",
+     "passagemath-graphs>=10.5.43,<11",
+ ]
+
+ [project.urls]
+ Homepage = "https://github.com/Marsella8/spization"
+ Repository = "https://github.com/Marsella8/spization"
+ Issues = "https://github.com/Marsella8/spization/issues"
+
+ [dependency-groups]
+ dev = [
+     "build>=1.2.2.post1,<2",
+     "pytest>=8.3.2,<9",
+     "pytest-xdist>=3.8.0,<4",
+     "pytest-cov>=5.0.0,<6",
+     "ruff>=0.6.3,<1",
+     "twine>=6.1.0,<7",
+     "ty>=0.0.1",
+ ]
+
+ [project.scripts]
+ benchmark = "spization.benchmarking.benchmarking:main"
+
+ [build-system]
+ requires = ["setuptools>=68", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [tool.setuptools]
+ license-files = ["LICENSE"]
+
+ [tool.setuptools.packages.find]
+ include = ["spization*"]
+ namespaces = false
+
+ [tool.ty.src]
+ include = ["spization"]
+
+ [tool.ruff]
+ lint.ignore = ["F811", "F821"]
+ lint.extend-select = ["I"]
+
+ [tool.pytest.ini_options]
+ pythonpath = ["."]
+ addopts = "-n auto"
spization-1.0.0/setup.cfg
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
spization-1.0.0/spization/__init__.py
@@ -0,0 +1,3 @@
+ from spization import algorithms, modular_decomposition, objects, utils
+
+ __all__ = ["algorithms", "objects", "utils", "modular_decomposition"]
spization-1.0.0/spization/__internals/__init__.py
File without changes
spization-1.0.0/spization/__internals/general/__init__.py
@@ -0,0 +1,53 @@
+ from functools import reduce
+ from itertools import chain
+ from typing import Callable, Iterable
+
+
+ def get_only[T](container: Iterable[T]) -> T:
+     c = list(container)
+     if len(c) != 1:
+         raise ValueError(f"Container must only have 1 item, has {len(c)}")
+     return c[0]
+
+
+ def get_any[T](container: Iterable[T]) -> T:
+     return next(iter(container))
+
+
+ def must[T](value: T | None) -> T:
+     if value is None:
+         raise ValueError("Used must() on a None value.")
+     return value
+
+
+ def flatmap[T, U](
+     func: Callable[[T], Iterable[U]], iterable: Iterable[T]
+ ) -> Iterable[U]:
+     return chain.from_iterable(map(func, iterable))
+
+
+ def are_all_equal[T](iterable: Iterable[T]) -> bool:
+     iterator = iter(iterable)
+     try:
+         first = next(iterator)
+     except StopIteration:
+         return True
+     return all(first == x for x in iterator)
+
+
+ def are_all_disjoint[T](iterable: Iterable[set[T] | frozenset[T]]) -> bool:
+     sets = list(iterable)
+     if not sets:
+         return True
+     union = reduce(lambda x, y: x.union(y), sets)
+     return len(union) == sum(len(s) for s in sets)
+
+
+ __all__ = [
+     "get_any",
+     "get_only",
+     "must",
+     "flatmap",
+     "are_all_equal",
+     "are_all_disjoint",
+ ]
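A minimal sanity check of these helpers, assuming the package is installed; the import path is the same one used by the other modules in this diff.

```python
# Sketch: exercising the general-purpose helpers defined above.
from spization.__internals.general import are_all_disjoint, flatmap, get_only

assert get_only([42]) == 42                                    # exactly one element
assert list(flatmap(lambda x: (x, -x), [1, 2])) == [1, -1, 2, -2]
assert are_all_disjoint([{1, 2}, {3}, frozenset({4, 5})])      # pairwise disjoint sets
```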
spization-1.0.0/spization/__internals/graph/__init__.py
@@ -0,0 +1,27 @@
+ from .add_nodes_and_edges import add_edges, add_node, add_nodes
+ from .longest_path_lengths_from_source import longest_path_lengths_from_source
+ from .lowest_common_ancestor import lowest_common_ancestor
+ from .properties import (
+     is_2_terminal_dag,
+     is_compatible_graph,
+     is_single_sourced,
+     is_transitively_closed_dag,
+ )
+ from .sinks import sinks
+ from .sources import sources
+ from .strata_sort import strata_sort
+
+ __all__ = [
+     "longest_path_lengths_from_source",
+     "lowest_common_ancestor",
+     "is_2_terminal_dag",
+     "is_compatible_graph",
+     "is_single_sourced",
+     "is_transitively_closed_dag",
+     "sinks",
+     "sources",
+     "strata_sort",
+     "add_edges",
+     "add_node",
+     "add_nodes",
+ ]
spization-1.0.0/spization/__internals/graph/add_nodes_and_edges.py
@@ -0,0 +1,20 @@
+ from typing import Iterable
+
+ from networkx import DiGraph
+
+ from spization.objects import DiEdge, Node
+
+
+ def add_node(g: DiGraph) -> Node:
+     n = max(g.nodes(), default=-1) + 1
+     g.add_node(n)
+     return n
+
+
+ def add_nodes(g: DiGraph, n: int) -> list[Node]:
+     return [add_node(g) for _ in range(n)]
+
+
+ def add_edges(g: DiGraph, edges: Iterable[DiEdge]) -> None:
+     for edge in edges:
+         g.add_edge(edge[0], edge[1])
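A short sketch of these builders in use, assuming the package is installed; `add_node` hands out the next unused integer id, starting at 0 on an empty graph.

```python
# Sketch: building a small DiGraph with the helpers above.
from networkx import DiGraph

from spization.__internals.graph import add_edges, add_nodes

g = DiGraph()
a, b, c = add_nodes(g, 3)         # fresh integer ids 0, 1, 2
add_edges(g, [(a, b), (a, c)])    # each edge is a (src, dst) pair
assert set(g.edges()) == {(0, 1), (0, 2)}
```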
spization-1.0.0/spization/__internals/graph/longest_path_lengths_from_source.py
@@ -0,0 +1,39 @@
+ import networkx as nx
+ from multimethod import multimethod
+ from networkx import DiGraph
+
+ from spization.__internals.general import get_only
+ from spization.objects import Node
+
+ from .properties import is_single_sourced
+ from .sources import sources
+
+
+ @multimethod
+ def longest_path_lengths_from_source(g: DiGraph) -> dict[Node, int]:
+     assert is_single_sourced(g)
+     dist: dict[Node, int] = dict.fromkeys(g.nodes, -1)
+     root: Node = get_only(sources(g))
+     dist[root] = 0
+     topo_order = nx.topological_sort(g)
+     for n in topo_order:
+         if n == root:
+             continue
+         dist[n] = 1 + max(dist[p] for p in g.predecessors(n))
+     return dist
+
+
+ @multimethod
+ def longest_path_lengths_from_source(
+     g: DiGraph, cost_map: dict[Node, int | float]
+ ) -> dict[Node, int | float]:
+     assert is_single_sourced(g)
+     dist: dict[Node, int | float] = dict.fromkeys(g.nodes, -1)
+     root: Node = get_only(sources(g))
+     dist[root] = cost_map[root]
+     topo_order = nx.topological_sort(g)
+     for n in topo_order:
+         if n == root:
+             continue
+         dist[n] = cost_map[n] + max(dist[p] for p in g.predecessors(n))
+     return dist
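A minimal sketch of both overloads on a diamond DAG, assuming the package is installed; the second overload adds each node's own cost along the heaviest path.

```python
# Sketch: longest-path depths from the unique source of a diamond DAG.
from networkx import DiGraph

from spization.__internals.graph import longest_path_lengths_from_source

g = DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)])
assert longest_path_lengths_from_source(g) == {0: 0, 1: 1, 2: 1, 3: 2}

# Cost-weighted overload: each node contributes its own cost to the path length.
costs = {0: 1, 1: 2, 2: 5, 3: 1}
assert longest_path_lengths_from_source(g, costs) == {0: 1, 1: 3, 2: 6, 3: 7}
```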
spization-1.0.0/spization/__internals/graph/lowest_common_ancestor.py
@@ -0,0 +1,17 @@
+ from typing import Optional
+
+ import networkx as nx
+ from networkx import DiGraph
+
+ from spization.__internals.general import get_any
+ from spization.objects import Node
+
+
+ def lowest_common_ancestor(g: DiGraph, nodes: set[Node]) -> Optional[Node]:
+     assert all(n in g.nodes() for n in nodes)
+     lca: Optional[Node] = get_any(nodes)
+     for n in nodes:
+         lca = nx.lowest_common_ancestor(g, lca, n)
+         if lca is None:
+             return lca
+     return lca
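A brief sketch on the same diamond DAG, assuming the package is installed; networkx treats a node as an ancestor of itself, so folding `nx.lowest_common_ancestor` over the set lands on the shared source.

```python
# Sketch: the LCA of the two middle nodes of a diamond is its source.
from networkx import DiGraph

from spization.__internals.graph import lowest_common_ancestor

g = DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)])
assert lowest_common_ancestor(g, {1, 2}) == 0
assert lowest_common_ancestor(g, {1, 2, 3}) == 0
```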
spization-1.0.0/spization/__internals/graph/properties.py
@@ -0,0 +1,34 @@
+ import networkx as nx
+ from networkx import DiGraph
+
+ from spization.objects import Node
+
+ from .sinks import sinks
+ from .sources import sources
+
+
+ def is_2_terminal_dag(g: DiGraph) -> bool:
+     if not nx.is_directed_acyclic_graph(g):
+         return False
+
+     return len(sources(g)) == 1 and len(sinks(g)) == 1
+
+
+ def is_compatible_graph(g: DiGraph) -> bool:
+     return all(isinstance(node, Node) for node in g.nodes())
+
+
+ def is_single_sourced(g: DiGraph) -> bool:
+     return len(sources(g)) == 1
+
+
+ def is_transitively_closed(g: DiGraph) -> bool:
+     for node in g.nodes():
+         for descendant in nx.descendants(g, node):
+             if not g.has_edge(node, descendant):
+                 return False
+     return True
+
+
+ def is_transitively_closed_dag(g: DiGraph) -> bool:
+     return nx.is_directed_acyclic_graph(g) and is_transitively_closed(g)
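A quick sketch of the predicates, assuming the package is installed; the diamond DAG has one source and one sink but is missing the transitive edge from source to sink.

```python
# Sketch: the diamond DAG is 2-terminal but not transitively closed.
from networkx import DiGraph

from spization.__internals.graph import is_2_terminal_dag, is_transitively_closed_dag

g = DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)])
assert is_2_terminal_dag(g)                 # single source 0 and single sink 3
assert not is_transitively_closed_dag(g)    # 0 reaches 3 but the edge (0, 3) is absent
```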
spization-1.0.0/spization/__internals/graph/sinks.py
@@ -0,0 +1,7 @@
+ from networkx import DiGraph
+
+ from spization.objects import Node
+
+
+ def sinks(g: DiGraph) -> set[Node]:
+     return {node for node, out_degree in g.out_degree() if out_degree == 0}
spization-1.0.0/spization/__internals/graph/sources.py
@@ -0,0 +1,7 @@
+ from networkx import DiGraph
+
+ from spization.objects import Node
+
+
+ def sources(g: DiGraph) -> set[Node]:
+     return {node for node, in_degree in g.in_degree() if in_degree == 0}
spization-1.0.0/spization/__internals/graph/strata_sort.py
@@ -0,0 +1,10 @@
+ from networkx import DiGraph
+
+ from spization.objects import Node
+
+ from .longest_path_lengths_from_source import longest_path_lengths_from_source
+
+
+ def strata_sort(g: DiGraph) -> list[Node]:
+     depth_map: dict[Node, int] = longest_path_lengths_from_source(g)
+     return sorted(depth_map.keys(), key=lambda node: depth_map[node])
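A short sketch, assuming the package is installed; nodes come back grouped by their longest-path depth from the source, with no guaranteed order inside a stratum.

```python
# Sketch: nodes ordered by longest-path depth (0, then {1, 2}, then 3).
from networkx import DiGraph

from spization.__internals.graph import strata_sort

g = DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)])
order = strata_sort(g)
assert order[0] == 0 and order[-1] == 3
assert set(order[1:3]) == {1, 2}    # order within a stratum is unspecified
```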
spization-1.0.0/spization/__internals/sp/__init__.py
@@ -0,0 +1,14 @@
+ from .cbc_decomposition import (
+     BipartiteComponent,
+     CompleteBipartiteCompositeDecomposition,
+     cbc_decomposition,
+ )
+ from .inverse_line_graph import InverseLineGraphResult, inverse_line_graph
+
+ __all__ = [
+     "inverse_line_graph",
+     "InverseLineGraphResult",
+     "cbc_decomposition",
+     "CompleteBipartiteCompositeDecomposition",
+     "BipartiteComponent",
+ ]
spization-1.0.0/spization/__internals/sp/cbc_decomposition.py
@@ -0,0 +1,93 @@
+ from collections import deque
+ from dataclasses import dataclass
+ from typing import Optional
+
+ from networkx import DiGraph
+
+ from spization.__internals.general import get_only
+ from spization.__internals.graph import sinks, sources
+ from spization.objects import DiEdge, Node
+
+
+ @dataclass(frozen=True)
+ class BipartiteComponent:
+     head_nodes: frozenset[Node]
+     tail_nodes: frozenset[Node]
+
+
+ CompleteBipartiteCompositeDecomposition = set[BipartiteComponent]
+
+
+ def is_complete_bipartite_digraph(g: DiGraph, head: frozenset[Node]) -> bool:
+     sinks = set(g.nodes) - head
+     for source in head:
+         for sink in sinks:
+             if not g.has_edge(source, sink):
+                 return False
+     return True
+
+
+ def cbc_decomposition(g: DiGraph) -> Optional[CompleteBipartiteCompositeDecomposition]:
+     edges_to_process = deque(sorted(g.edges()))
+
+     already_in_a_head: set[Node] = set()
+     already_in_a_tail: set[Node] = set()
+     already_processed: set[DiEdge] = set()
+     result: CompleteBipartiteCompositeDecomposition = set()
+
+     while edges_to_process:
+         e = edges_to_process.pop()
+         if e in already_processed:
+             continue
+
+         head = frozenset(g.predecessors(e[1]))
+         tail = frozenset(g.successors(e[0]))
+
+         if head & tail:
+             return None
+
+         from_head_to_tail = {(u, v) for u in head for v in tail if g.has_edge(u, v)}
+
+         subgraph = g.subgraph(head | tail)
+
+         if not is_complete_bipartite_digraph(subgraph, head):
+             return None
+
+         for u, v in subgraph.edges():
+             if (u, v) not in from_head_to_tail:
+                 return None
+
+         out_edges = {(u, v) for u in head for v in g.successors(u)}
+         if out_edges != from_head_to_tail:
+             return None
+
+         in_edges = {(u, v) for v in tail for u in g.predecessors(v)}
+         if in_edges != from_head_to_tail:
+             return None
+
+         result.add(BipartiteComponent(head, tail))
+
+         already_processed |= from_head_to_tail
+         already_in_a_head.update(head)
+         already_in_a_tail.update(tail)
+
+     assert already_in_a_head == set(g.nodes) - sinks(g)
+     assert already_in_a_tail == set(g.nodes) - sources(g)
+
+     return result
+
+
+ def get_component_containing_node_in_head(
+     cbc: CompleteBipartiteCompositeDecomposition, n: Node
+ ) -> Optional[BipartiteComponent]:
+     found: set[BipartiteComponent] = set(filter(lambda bc: n in bc.head_nodes, cbc))
+     assert len(found) <= 1
+     return get_only(found) if found else None
+
+
+ def get_component_containing_node_in_tail(
+     cbc: CompleteBipartiteCompositeDecomposition, n: Node
+ ) -> Optional[BipartiteComponent]:
+     found: set[BipartiteComponent] = set(filter(lambda bc: n in bc.tail_nodes, cbc))
+     assert len(found) <= 1
+     return get_only(found) if found else None
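A minimal sketch of how `cbc_decomposition` behaves, assuming the package is installed: a complete bipartite layer decomposes into one component, while a graph whose head/tail neighbourhoods do not line up is rejected.

```python
# Sketch: K_{2,2} as a digraph {0, 1} -> {2, 3} yields a single component,
# while the "N" graph (0->2, 1->2, 1->3) admits no CBC decomposition.
from networkx import DiGraph

from spization.__internals.sp import cbc_decomposition

k22 = DiGraph([(0, 2), (0, 3), (1, 2), (1, 3)])
decomp = cbc_decomposition(k22)
assert decomp is not None and len(decomp) == 1

assert cbc_decomposition(DiGraph([(0, 2), (1, 2), (1, 3)])) is None
```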
spization-1.0.0/spization/__internals/sp/inverse_line_graph.py
@@ -0,0 +1,63 @@
+ from dataclasses import dataclass
+ from typing import Optional
+
+ from bidict import bidict
+ from networkx import DiGraph, MultiDiGraph
+
+ from spization.__internals.graph import add_node, sinks, sources
+ from spization.__internals.sp.cbc_decomposition import (
+     BipartiteComponent,
+     cbc_decomposition,
+     get_component_containing_node_in_head,
+     get_component_containing_node_in_tail,
+ )
+ from spization.objects import MultiDiEdge, Node, SerialParallelDecomposition
+
+
+ @dataclass
+ class InverseLineGraphResult:
+     graph: MultiDiGraph
+     inverse_edge_to_line_node_map: bidict[MultiDiEdge, SerialParallelDecomposition]
+
+
+ def inverse_line_graph(g: DiGraph) -> Optional[InverseLineGraphResult]:
+     cbc_decomp = cbc_decomposition(g)
+     if cbc_decomp is None:
+         return None
+
+     result_graph = MultiDiGraph()
+     alpha: Node = add_node(result_graph)
+     omega: Node = add_node(result_graph)
+
+     component_nodes = bidict(
+         {bi_comp: add_node(result_graph) for bi_comp in cbc_decomp}
+     )
+
+     def h(n: Node) -> BipartiteComponent:
+         cmp = get_component_containing_node_in_head(cbc_decomp, n)
+         assert cmp is not None
+         return cmp
+
+     def t(n: Node) -> BipartiteComponent:
+         cmp = get_component_containing_node_in_tail(cbc_decomp, n)
+         assert cmp is not None
+         return cmp
+
+     srcs = sources(g)
+     snks = sinks(g)
+
+     def src_for_node(v: Node) -> Node:
+         return alpha if v in srcs else component_nodes[t(v)]
+
+     def dst_for_node(v: Node) -> Node:
+         return omega if v in snks else component_nodes[h(v)]
+
+     inverse_edge_to_line_node: bidict[MultiDiEdge, SerialParallelDecomposition] = bidict()
+
+     for v in g.nodes:
+         src, dst = src_for_node(v), dst_for_node(v)
+         idx = result_graph.add_edge(src, dst)
+         edge: MultiDiEdge = (src, dst, idx)
+         inverse_edge_to_line_node[edge] = v
+
+     return InverseLineGraphResult(result_graph, inverse_edge_to_line_node)
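A brief sketch, assuming the package is installed: a single line-graph edge corresponds to a path of two consecutive edges in the recovered multigraph, with one multigraph edge per original line-graph node.

```python
# Sketch: inverting the line graph with one edge (0 -> 1) gives a 3-node path.
from networkx import DiGraph

from spization.__internals.sp import inverse_line_graph

line = DiGraph([(0, 1)])
result = inverse_line_graph(line)
assert result is not None
assert result.graph.number_of_nodes() == 3
assert result.graph.number_of_edges() == 2
assert set(result.inverse_edge_to_line_node_map.values()) == {0, 1}
```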
spization-1.0.0/spization/algorithms/__init__.py
@@ -0,0 +1,12 @@
+ from .flexible_sync import flexible_sync
+ from .naive_strata_sync import naive_strata_sync
+ from .pure_node_dup import pure_node_dup, tree_pure_node_dup
+ from .spanish_strata_sync import spanish_strata_sync
+
+ __all__ = [
+     "naive_strata_sync",
+     "pure_node_dup",
+     "tree_pure_node_dup",
+     "spanish_strata_sync",
+     "flexible_sync",
+ ]