iplotx 0.1.0__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
iplotx/edge/ports.py ADDED
@@ -0,0 +1,47 @@
+ """
+ Module for handling edge ports in iplotx.
+ """
+
+ import numpy as np
+
+ sq2 = np.sqrt(2) / 2
+
+ port_dict = {
+     "s": (0, -1),
+     "w": (-1, 0),
+     "n": (0, 1),
+     "e": (1, 0),
+     "sw": (-sq2, -sq2),
+     "nw": (-sq2, sq2),
+     "ne": (sq2, sq2),
+     "se": (sq2, -sq2),
+ }
+
+
+ def _get_port_unit_vector(
+     portstring,
+     trans_inv,
+ ):
+     """Get the tangent unit vector from a port string."""
+     # The only tricky bit is if the port says e.g. north but the y axis is inverted, in which
+     # case the port should go south. We can figure it out by checking the sign of the monotonic
+     # trans_inv from figure to data coordinates.
+     v12 = trans_inv(
+         np.array(
+             [
+                 [0, 0],
+                 [1, 1],
+             ]
+         )
+     )
+     invertx = v12[1, 0] - v12[0, 0] < 0
+     inverty = v12[1, 1] - v12[0, 1] < 0
+
+     if invertx:
+         portstring = portstring.replace("w", "x").replace("e", "w").replace("x", "e")
+     if inverty:
+         portstring = portstring.replace("n", "x").replace("s", "n").replace("x", "s")
+
+     if portstring not in port_dict:
+         raise KeyError(f"Port not found: {portstring}")
+     return np.array(port_dict[portstring])
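As a quick illustration of the inversion logic above, here is a minimal sketch that calls the private helper directly. The two trans_inv callables are stand-ins for an inverted figure-to-data transform; anything not shown in the diff (such as these lambdas) is illustrative only, not iplotx API.

    import numpy as np
    from iplotx.edge.ports import _get_port_unit_vector

    identity = lambda pts: np.asarray(pts)          # axes oriented the usual way
    flip_y = lambda pts: np.asarray(pts) * [1, -1]  # y axis inverted (e.g. after ax.invert_yaxis())

    _get_port_unit_vector("n", identity)   # -> array([0, 1]): north points up
    _get_port_unit_vector("n", flip_y)     # -> array([0, -1]): "north" flips to south in data coordinates
    _get_port_unit_vector("ne", identity)  # -> array([0.707..., 0.707...])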
iplotx/groups.py CHANGED
@@ -1,19 +1,22 @@
- from typing import Union, Sequence
- from copy import deepcopy
- from collections import defaultdict
+ """
+ Module for vertex groupings code, especially the GroupingArtist class.
+ """
+
+ from typing import Union
  import numpy as np
- import pandas as pd
  import matplotlib as mpl
  from matplotlib.collections import PatchCollection


- from .importing import igraph
  from .typing import (
      GroupingType,
      LayoutType,
  )
- from .heuristics import normalise_layout, normalise_grouping
- from .styles import get_style, rotate_style
+ from .ingest.heuristics import (
+     normalise_layout,
+     normalise_grouping,
+ )
+ from .style import get_style, rotate_style
  from .utils.geometry import (
      convex_hull,
      _compute_group_path_with_vertex_padding,
@@ -21,14 +24,23 @@ from .utils.geometry import (


  class GroupingArtist(PatchCollection):
+     """Matplotlib artist for a vertex grouping (clustering/cover).
+
+     This class is used to plot patches surrounding groups of vertices in a network.
+     """
+
+     _factor = 1.0
+
      def __init__(
          self,
          grouping: GroupingType,
          layout: LayoutType,
          vertexpadding: Union[None, int] = None,
+         points_per_curve: int = 30,
+         transform: mpl.transforms.Transform = mpl.transforms.IdentityTransform(),
          *args,
          **kwargs,
-     ):
+     ) -> None:
          """Container artist for vertex groupings, e.g. covers or clusterings.

          Parameters:
@@ -38,21 +50,55 @@ class GroupingArtist(PatchCollection):
              layout: The layout of the vertices. If this object has no keys/index, the
                  vertices are assumed to have IDs corresponding to integers starting from
                  zero.
+             vertexpadding: How many points of padding to leave around each vertex centre.
+             points_per_curve: How many points to use to approximate a round envelope around
+                 each convex hull vertex.
+             transform: The matplotlib transform to use for the patches (typically transData).
          """
          if vertexpadding is not None:
              self._vertexpadding = vertexpadding
          else:
              style = get_style(".grouping")
              self._vertexpadding = style.get("vertexpadding", 10)
-         patches, grouping, layout = self._create_patches(grouping, layout, **kwargs)
+
+         self._points_per_curve = points_per_curve
+
+         network = kwargs.pop("network", None)
+         patches, grouping, coords_hulls = self._create_patches(
+             grouping,
+             layout,
+             network,
+             **kwargs,
+         )
+         if "network" in kwargs:
+             del kwargs["network"]
          self._grouping = grouping
-         self._layout = layout
+         self._coords_hulls = coords_hulls
          kwargs["match_original"] = True

          super().__init__(patches, *args, **kwargs)

-     def _create_patches(self, grouping, layout, **kwargs):
-         layout = normalise_layout(layout)
+         zorder = get_style(".grouping").get("zorder", 1)
+         self.set_zorder(zorder)
+
+         self.set_transform(transform)
+
+     def set_figure(self, figure) -> None:
+         """Set the figure for the grouping, recomputing the paths depending on the figure's dpi."""
+         ret = super().set_figure(figure)
+         self._compute_paths(self.get_figure(root=True).dpi)
+         return ret
+
+     def get_vertexpadding(self) -> float:
+         """Get the vertex padding of each group."""
+         return self._vertexpadding
+
+     def get_vertexpadding_dpi(self, dpi: float = 72.0) -> float:
+         """Get vertex padding of each group, scaled by dpi of the figure."""
+         return self.get_vertexpadding() * dpi / 72.0 * self._factor
+
+     def _create_patches(self, grouping, layout, network, **kwargs):
+         layout = normalise_layout(layout, network=network)
          grouping = normalise_grouping(grouping, layout)
          style = get_style(".grouping")
          style.pop("vertexpadding", None)
@@ -60,6 +106,7 @@ class GroupingArtist(PatchCollection):
          style.update(kwargs)

          patches = []
+         coords_hulls = []
          for i, (name, vids) in enumerate(grouping.items()):
              if len(vids) == 0:
                  continue
@@ -67,6 +114,7 @@ class GroupingArtist(PatchCollection):
              coords = layout.loc[vids].values
              idx_hull = convex_hull(coords)
              coords_hull = coords[idx_hull]
+             coords_hulls.append(coords_hull)

              stylei = rotate_style(style, i)

@@ -79,23 +127,37 @@ class GroupingArtist(PatchCollection):
              )

              patches.append(patch)
-         return patches, grouping, layout
-
-     def _compute_paths(self):
-         if self._vertexpadding > 0:
-             for i, path in enumerate(self._paths):
-                 self._paths[i].vertices = _compute_group_path_with_vertex_padding(
-                     path.vertices,
-                     self.get_transform(),
-                     vertexpadding=self._vertexpadding,
-                 )
-
-     def _process(self):
-         self.set_transform(self.axes.transData)
-         self._compute_paths()
+         return patches, grouping, coords_hulls
+
+     def _compute_paths(self, dpi: float = 72.0) -> None:
+         ppc = self._points_per_curve
+         for i, hull in enumerate(self._coords_hulls):
+             self._paths[i].vertices = _compute_group_path_with_vertex_padding(
+                 hull,
+                 self._paths[i].vertices,
+                 self.get_transform(),
+                 vertexpadding=self.get_vertexpadding_dpi(dpi),
+                 points_per_curve=ppc,
+             )

-     def draw(self, renderer):
+     def _process(self) -> None:
          self._compute_paths()
+
+     def draw(self, renderer) -> None:
+         """Draw or re-draw the grouping patches.
+
+         Parameters:
+             renderer: The renderer to use for drawing the patches.
+         """
+         # FIXME: this kind of breaks everything since the vertices' magical "_transforms" does
+         # not really scale from 72 pixels but rather from the screen's or something.
+         # Conclusion: using this keeps consistency across dpis but breaks proportionality of
+         # vertexpadding and vertex_size (for now).
+         # NOTE: this might be less bad than initially thought in the sense that even perfect
+         # scaling does not seem to align the center of the perimeter of the group with the
+         # center of the perimeter of the vertex when of the same exact size. So we are
+         # probably ok winging it as users will adapt.
+         self._compute_paths(self.get_figure(root=True).dpi)
          super().draw(renderer)


@@ -111,24 +173,10 @@ def _compute_group_patch_stub(
      )

      # NOTE: Closing point: mpl is a bit quirky here
-     vertices = []
-     codes = []
-     if len(points) == 0:
-         vertices = np.zeros((0, 2))
-     elif len(points) == 1:
-         vertices = [points[0]] * 9
-         codes = ["MOVETO"] + ["CURVE3"] * 8
-     elif len(points) == 2:
-         vertices = [points[0]] * 5 + [points[1]] * 5 + [points[0]]
-         codes = ["MOVETO"] + ["CURVE3"] * 4 + ["LINETO"] + ["CURVE3"] * 4 + ["LINETO"]
-     else:
-         for point in points:
-             vertices.extend([point] * 3)
-             codes.extend(["LINETO", "CURVE3", "CURVE3"])
-         vertices.append(vertices[0])
-         codes.append("LINETO")
-         codes[0] = "MOVETO"
-
+     vertices = np.zeros(
+         (1 + 30 * len(points), 2),
+     )
+     codes = ["MOVETO"] + ["LINETO"] * (len(vertices) - 2) + ["CLOSEPOLY"]
      codes = [getattr(mpl.path.Path, x) for x in codes]
      patch = mpl.patches.PathPatch(
          mpl.path.Path(
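Throughout iplotx/groups.py above, vertexpadding is expressed in typographic points and only converted to pixels at draw time, using the figure dpi that draw() and set_figure() thread into _compute_paths(). A standalone sketch of that conversion, using the same arithmetic as get_vertexpadding_dpi with _factor left at its default of 1.0:

    def padding_pixels(vertexpadding_pt: float, dpi: float, factor: float = 1.0) -> float:
        # points -> pixels: 1 point is 1/72 inch, so scale by dpi / 72
        return vertexpadding_pt * dpi / 72.0 * factor

    padding_pixels(10, dpi=72)    # 10.0 px: at 72 dpi one point equals one pixel
    padding_pixels(10, dpi=200)   # ~27.8 px: the padded hull grows with the output resolution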
iplotx/ingest/__init__.py ADDED
@@ -0,0 +1,155 @@
+ """
+ This module ingests network data into standard data structures, no matter which library it comes from.
+ """
+
+ import pathlib
+ import pkgutil
+ import importlib
+ import importlib.metadata
+ import warnings
+ from typing import (
+     Optional,
+     Sequence,
+     Protocol,
+ )
+ from collections.abc import Hashable
+ import pandas as pd
+
+ from ..typing import (
+     GraphType,
+     LayoutType,
+     TreeType,
+ )
+ from .typing import (
+     NetworkDataProvider,
+     NetworkData,
+     TreeDataProvider,
+     TreeData,
+ )
+
+ provider_protocols = {
+     "network": NetworkDataProvider,
+     "tree": TreeDataProvider,
+ }
+
+ # Internally supported data providers
+ data_providers: dict[str, dict[str, Protocol]] = {
+     kind: {} for kind in provider_protocols
+ }
+ for kind in data_providers:
+     providers_path = pathlib.Path(__file__).parent.joinpath("providers").joinpath(kind)
+     for importer, module_name, _ in pkgutil.iter_modules([providers_path]):
+         module = importlib.import_module(
+             f"iplotx.ingest.providers.{kind}.{module_name}"
+         )
+         for key, val in module.__dict__.items():
+             if key == provider_protocols[kind].__name__:
+                 continue
+             if key.endswith("DataProvider"):
+                 data_providers[kind][module_name] = val()
+                 break
+ del providers_path
+
+
+ def network_library(network) -> str:
+     """Guess the network library used to create the network."""
+     for name, provider in data_providers["network"].items():
+         if provider.check_dependencies():
+             graph_type = provider.graph_type()
+             if isinstance(network, graph_type):
+                 return name
+     raise ValueError(
+         f"Network {network} did not match any available network library.",
+     )
+
+
+ def tree_library(tree) -> str:
+     """Guess the tree library used to create the tree."""
+     for name, provider in data_providers["tree"].items():
+         if provider.check_dependencies():
+             tree_type = provider.tree_type()
+             if isinstance(tree, tree_type):
+                 return name
+     raise ValueError(
+         f"Tree {tree} did not match any available tree library.",
+     )
+
+
+ # Functions to ingest data from various libraries
+ def ingest_network_data(
+     network: GraphType,
+     layout: Optional[LayoutType] = None,
+     vertex_labels: Optional[Sequence[str] | dict[Hashable, str] | pd.Series] = None,
+     edge_labels: Optional[Sequence[str] | dict[str,]] = None,
+ ) -> NetworkData:
+     """Create internal data for the network."""
+     _update_data_providers("network")
+
+     nl = network_library(network)
+
+     if nl in data_providers["network"]:
+         provider: NetworkDataProvider = data_providers["network"][nl]
+     else:
+         sup = ", ".join(data_providers["network"].keys())
+         raise ValueError(
+             f"Network library '{nl}' is not installed. "
+             f"Currently installed supported libraries: {sup}."
+         )
+
+     result = provider(
+         network=network,
+         layout=layout,
+         vertex_labels=vertex_labels,
+         edge_labels=edge_labels,
+     )
+     result["network_library"] = nl
+     return result
+
+
+ def ingest_tree_data(
+     tree: TreeType,
+     layout: Optional[str] = "horizontal",
+     orientation: Optional[str] = "right",
+     directed: bool | str = False,
+     vertex_labels: Optional[Sequence[str] | dict[Hashable, str] | pd.Series] = None,
+     edge_labels: Optional[Sequence[str] | dict[str,]] = None,
+ ) -> TreeData:
+     """Create internal data for the tree."""
+     _update_data_providers("tree")
+
+     tl = tree_library(tree)
+
+     if tl in data_providers["tree"]:
+         provider: TreeDataProvider = data_providers["tree"][tl]
+     else:
+         sup = ", ".join(data_providers["tree"].keys())
+         raise ValueError(
+             f"Tree library '{tl}' is not installed. "
+             f"Currently installed supported libraries: {sup}."
+         )
+
+     result = provider(
+         tree=tree,
+         layout=layout,
+         orientation=orientation,
+         directed=directed,
+         vertex_labels=vertex_labels,
+         edge_labels=edge_labels,
+     )
+     result["tree_library"] = tl
+     return result
+
+
+ # INTERNAL FUNCTIONS
+ def _update_data_providers(kind):
+     """Update data providers dynamically from external packages."""
+     discovered_providers = importlib.metadata.entry_points(
+         group=f"iplotx.{kind}_data_providers"
+     )
+     for entry_point in discovered_providers:
+         if entry_point.name not in data_providers[kind]:
+             try:
+                 data_providers[kind][entry_point.name] = entry_point.load()
+             except Exception as e:
+                 warnings.warn(
+                     f"Failed to load {kind} data provider '{entry_point.name}': {e}"
+                 )
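Besides the built-in providers discovered under iplotx/ingest/providers/, _update_data_providers also queries packaging entry points in the group "iplotx.{kind}_data_providers". A hedged sketch of how an external package could plug in; the plugin and module names below are hypothetical:

    # Hypothetical metadata in the plugin's pyproject.toml:
    #
    #   [project.entry-points."iplotx.network_data_providers"]
    #   mygraphlib = "mygraphlib.iplotx_provider:MyGraphLibDataProvider"
    #
    # iplotx then discovers it lazily, the same way _update_data_providers does:
    import importlib.metadata

    for ep in importlib.metadata.entry_points(group="iplotx.network_data_providers"):
        print(ep.name)        # e.g. "mygraphlib" (hypothetical plugin name)
        provider = ep.load()  # whatever object the plugin registered for the provider protocol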
iplotx/ingest/heuristics.py ADDED
@@ -0,0 +1,209 @@
+ """
+ Heuristics module to funnel certain variable inputs (e.g. layouts) into a standard format.
+ """
+
+ from typing import (
+     Optional,
+     Any,
+ )
+ from collections.abc import Hashable
+ from collections import defaultdict
+ import numpy as np
+ import pandas as pd
+
+ from ..layout import compute_tree_layout
+ from ..typing import (
+     GraphType,
+     GroupingType,
+     TreeType,
+     LayoutType,
+ )
+
+
+ def number_of_vertices(network: GraphType) -> int:
+     """Get the number of vertices in the network."""
+     from . import network_library
+
+     if network_library(network) == "igraph":
+         return network.vcount()
+     if network_library(network) == "networkx":
+         return network.number_of_nodes()
+     raise TypeError("Unsupported graph type. Supported types are igraph and networkx.")
+
+
+ def detect_directedness(
+     network: GraphType,
+ ) -> bool:
+     """Detect if the network is directed or not."""
+     from . import network_library
+
+     nl = network_library(network)
+
+     if nl == "igraph":
+         return network.is_directed()
+     if nl == "networkx":
+         import networkx as nx
+
+         if isinstance(network, (nx.DiGraph, nx.MultiDiGraph)):
+             return True
+     return False
+
+
+ def normalise_layout(layout, network=None):
+     """Normalise the layout to a pandas.DataFrame."""
+     from . import network_library
+
+     try:
+         import igraph as ig
+     except ImportError:
+         ig = None
+
+     if layout is None:
+         if (network is not None) and (number_of_vertices(network) == 0):
+             return pd.DataFrame(np.zeros((0, 2)))
+         return None
+     if (network is not None) and isinstance(layout, str):
+         if network_library(network) == "igraph":
+             if hasattr(network, layout):
+                 layout = network[layout]
+             else:
+                 layout = network.layout(layout)
+                 # NOTE: This seems like a legit bug in igraph
+                 # Sometimes (e.g. sugiyama) the layout has more vertices than the network (?)
+                 layout = np.asarray(layout.coords)[: network.vcount()]
+         if network_library(network) == "networkx":
+             layout = dict(network.nodes.data(layout))
+
+     if (ig is not None) and isinstance(layout, ig.layout.Layout):
+         return pd.DataFrame(layout.coords)
+     if isinstance(layout, dict):
+         return pd.DataFrame(layout).T
+     if isinstance(layout, str):
+         raise NotImplementedError("Layout as a string is not supported yet.")
+     if isinstance(layout, (list, tuple)):
+         return pd.DataFrame(np.array(layout))
+     if isinstance(layout, pd.DataFrame):
+         return layout
+     if isinstance(layout, np.ndarray):
+         return pd.DataFrame(layout)
+     raise TypeError("Layout could not be normalised.")
+
+
+ def normalise_tree_layout(
+     layout: str | Any,
+     tree: Optional[TreeType] = None,
+     **kwargs,
+ ) -> pd.DataFrame:
+     """Normalise tree layout from a variety of inputs.
+
+     Parameters:
+         layout: The tree layout to normalise.
+         tree: The corresponding tree object.
+         **kwargs: Additional arguments for the subroutines.
+
+     Returns:
+         A pandas DataFrame with the normalised tree layout.
+
+     NOTE: This function currently only accepts strings and computes
+     the layout internally. This might change in the future.
+     """
+     if isinstance(layout, str):
+         layout = compute_tree_layout(tree, layout, **kwargs)
+     else:
+         raise NotImplementedError(
+             "Only internally computed tree layouts are currently accepted."
+         )
+
+     if isinstance(layout, dict):
+         # Adjust vertex layout
+         index = []
+         coordinates = []
+         for key, coordinate in layout.items():
+             index.append(key)
+             coordinates.append(coordinate)
+         index = pd.Index(index)
+         coordinates = np.array(coordinates)
+         ndim = len(coordinates[0]) if len(coordinates) > 0 else 2
+         layout_columns = [f"_ipx_layout_{i}" for i in range(ndim)]
+         layout = pd.DataFrame(
+             coordinates,
+             index=index,
+             columns=layout_columns,
+         )
+
+     return layout
+
+
+ def normalise_grouping(
+     grouping: GroupingType,
+     layout: LayoutType,
+ ) -> dict[Hashable, set]:
+     """Normalise network grouping from a variety of inputs.
+
+     Parameters:
+         grouping: Network grouping (e.g. vertex cover).
+         layout: Network layout.
+
+     Returns:
+         A dictionary of sets. Each key is the index of a group, each value is a set of vertices
+         included in that group. If all sets are mutually exclusive, this is a vertex clustering,
+         otherwise it's only a vertex cover.
+     """
+     try:
+         import igraph as ig
+     except ImportError:
+         ig = None
+
+     if len(grouping) == 0:
+         return {}
+
+     if isinstance(grouping, dict):
+         val0 = next(iter(grouping.values()))
+         # If already the right data type or compatible, leave as is
+         if isinstance(val0, (set, frozenset)):
+             return grouping
+
+         # If a dict of integers or strings, assume each key is a vertex id and each value is a
+         # group, and convert (i.e. invert the dict)
+         if isinstance(val0, (int, str)):
+             group_dic = defaultdict(set)
+             for key, val in grouping.items():
+                 group_dic[val].add(key)
+             return group_dic
+
+     # If an igraph object, convert to a dict of sets
+     if ig is not None:
+         if isinstance(grouping, ig.clustering.Clustering):
+             layout = normalise_layout(layout)
+             group_dic = defaultdict(set)
+             for i, member in enumerate(grouping.membership):
+                 group_dic[member].add(i)
+             return group_dic
+
+         if isinstance(grouping, ig.clustering.Cover):
+             layout = normalise_layout(layout)
+             group_dic = defaultdict(set)
+             for i, members in enumerate(grouping.membership):
+                 for member in members:
+                     group_dic[member].add(i)
+             return group_dic
+
+     # Assume it's a sequence, so convert to list
+     grouping = list(grouping)
+
+     # If the values are already sets, assume group indices are integers
+     # and values are as is
+     if isinstance(grouping[0], set):
+         return dict(enumerate(grouping))
+
+     # If the values are integers or strings, assume each index is a vertex id and each value is a
+     # group, convert to dict of sets
+     if isinstance(grouping[0], (int, str)):
+         group_dic = defaultdict(set)
+         for i, val in enumerate(grouping):
+             group_dic[val].add(i)
+         return group_dic
+
+     raise TypeError(
+         "Could not standardise grouping from object.",
+     )
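To make the normalisation heuristics concrete, here is a small sketch of the pure-Python code paths only (no igraph or networkx objects involved). The import path assumes these helpers live at iplotx.ingest.heuristics, as the "from .ingest.heuristics import ..." line in iplotx/groups.py suggests:

    from iplotx.ingest.heuristics import normalise_grouping, normalise_layout

    # A vertex-to-group mapping is inverted into {group: set of vertex ids}.
    # The layout argument is only consulted for igraph Clustering/Cover inputs, so None is fine here:
    normalise_grouping({"alice": 0, "bob": 0, "carol": 1}, layout=None)
    # -> {0: {"alice", "bob"}, 1: {"carol"}} (as a defaultdict of sets)

    # A flat membership sequence works the same way, with positions as vertex ids:
    normalise_grouping([0, 0, 1], layout=None)
    # -> {0: {0, 1}, 1: {2}}

    # Layouts given as a dict, list/tuple or ndarray all come back as a pandas DataFrame:
    normalise_layout({"alice": (0, 0), "bob": (1, 0), "carol": (0, 1)})
    # -> a 3x2 DataFrame indexed by vertex id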