napistu 0.4.5__py3-none-any.whl → 0.4.7__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
napistu/network/ng_utils.py CHANGED
@@ -23,7 +23,7 @@ from napistu.constants import SBML_DFS
  from napistu.constants import SOURCE_SPEC
  from napistu.identifiers import _validate_assets_sbml_ids
  from napistu.network.constants import GRAPH_WIRING_APPROACHES
- from napistu.network.constants import NAPISTU_GRAPH_DIRECTEDNESS
+ from napistu.network.constants import GRAPH_DIRECTEDNESS

  logger = logging.getLogger(__name__)

@@ -133,8 +133,8 @@ def get_minimal_sources_edges(

      Returns
      -------
-     edge_sources: pd.DataFrame
-         A table of edges and the sources they are assigned to.
+     reaction_sources: pd.DataFrame
+         A table of reactions and the sources they are assigned to.
      """

      nodes = vertices["node"].tolist()
@@ -148,10 +148,10 @@ def get_minimal_sources_edges(
      if source_df is None:
          return None
      else:
-         edge_sources = source.source_set_coverage(
+         reaction_sources = source.source_set_coverage(
              source_df, source_total_counts, sbml_dfs, min_pw_size=min_pw_size
          )
-         return edge_sources.reset_index()[
+         return reaction_sources.reset_index()[
              [SBML_DFS.R_ID, SOURCE_SPEC.PATHWAY_ID, SOURCE_SPEC.NAME]
          ]

@@ -406,9 +406,9 @@ def _create_network_save_string(
      model_prefix: str, outdir: str, directed: bool, wiring_approach: str
  ) -> str:
      if directed:
-         directed_str = NAPISTU_GRAPH_DIRECTEDNESS.DIRECTED
+         directed_str = GRAPH_DIRECTEDNESS.DIRECTED
      else:
-         directed_str = NAPISTU_GRAPH_DIRECTEDNESS.UNDIRECTED
+         directed_str = GRAPH_DIRECTEDNESS.UNDIRECTED

      export_pkl_path = os.path.join(
          outdir,
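The hunks above rename the directedness constant (NAPISTU_GRAPH_DIRECTEDNESS → GRAPH_DIRECTEDNESS) and rework `get_minimal_sources_edges` so that it returns reaction-level source assignments. A minimal sketch of how a caller might consume the renamed return value is below; only the names visible in the hunks (`vertices` with a "node" column, `sbml_dfs`, `source_total_counts`, `min_pw_size`, and the r_id/pathway_id/name result columns) are taken from the diff, and the keyword spelling `source_total_counts=` plus the example `min_pw_size` value are assumptions:

```python
import pandas as pd

from napistu.network import ng_utils


def reaction_source_table(vertices: pd.DataFrame, sbml_dfs, source_total_counts):
    """Hypothetical wrapper: return the reaction -> pathway assignment table, or None."""
    # Per the hunks above, the result (when not None) has r_id, pathway_id, and name columns.
    return ng_utils.get_minimal_sources_edges(
        vertices,  # expects a "node" column, per the function body shown above
        sbml_dfs,
        source_total_counts=source_total_counts,  # assumed keyword name
        min_pw_size=3,  # assumed minimum pathway size
    )
```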
napistu/network/paths.py CHANGED
@@ -11,13 +11,19 @@ from napistu import sbml_dfs_core
  from napistu import utils
  from napistu.network.ng_core import NapistuGraph
  from napistu.network.ng_utils import get_minimal_sources_edges
- from napistu.constants import NAPISTU_PATH_REQ_VARS
- from napistu.constants import MINI_SBO_NAME_TO_POLARITY
- from napistu.constants import MINI_SBO_TO_NAME
- from napistu.constants import SBML_DFS
- from napistu.network.constants import NET_POLARITY
- from napistu.network.constants import NAPISTU_GRAPH_EDGES
- from napistu.network.constants import VALID_LINK_POLARITIES
+ from napistu.constants import (
+     MINI_SBO_NAME_TO_POLARITY,
+     MINI_SBO_TO_NAME,
+     NAPISTU_EDGELIST,
+     NAPISTU_PATH_REQ_VARS,
+     SBML_DFS,
+ )
+ from napistu.network.constants import (
+     NAPISTU_GRAPH_EDGES,
+     NAPISTU_GRAPH_VERTICES,
+     NET_POLARITY,
+     VALID_LINK_POLARITIES,
+ )

  logger = logging.getLogger(__name__)

@@ -273,8 +279,8 @@ def find_all_shortest_reaction_paths(
          Nodes in all shortest paths
      all_shortest_reaction_path_edges_df : pd.DataFrame
          Edges in all shortest paths
-     edge_sources : pd.DataFrame
-         Sources of edge identifying the models where they originated
+     reaction_sources : pd.DataFrame
+         Sources of reactions identifying the models where they originated
      paths_graph : igraph.Graph
          Network formed by all shortest paths
      """
@@ -297,8 +303,8 @@ def find_all_shortest_reaction_paths(
          paths = find_shortest_reaction_paths(
              napistu_graph,
              sbml_dfs,
-             origin=one_search["sc_id_origin"],
-             dest=one_search["sc_id_dest"],
+             origin=one_search[NAPISTU_EDGELIST.SC_ID_ORIGIN],
+             dest=one_search[NAPISTU_EDGELIST.SC_ID_DEST],
              weight_var=weight_var,
          )

@@ -309,12 +315,14 @@ def find_all_shortest_reaction_paths(

          all_shortest_reaction_paths.append(
              shortest_paths_df.assign(
-                 origin=one_search["sc_id_origin"], dest=one_search["sc_id_dest"]
+                 origin=one_search[NAPISTU_EDGELIST.SC_ID_ORIGIN],
+                 dest=one_search[NAPISTU_EDGELIST.SC_ID_DEST],
              )
          )
          all_shortest_reaction_path_edges.append(
              shortest_path_edges_df.assign(
-                 origin=one_search["sc_id_origin"], dest=one_search["sc_id_dest"]
+                 origin=one_search[NAPISTU_EDGELIST.SC_ID_ORIGIN],
+                 dest=one_search[NAPISTU_EDGELIST.SC_ID_DEST],
              )
          )

@@ -332,7 +340,7 @@ def find_all_shortest_reaction_paths(
      ).reset_index()

      # at a minimal set of pathway sources to organize reactions
-     edge_sources = get_minimal_sources_edges(
+     reaction_sources = get_minimal_sources_edges(
          all_shortest_reaction_paths_df,
          sbml_dfs,
          min_pw_size=min_pw_size,
@@ -353,13 +361,13 @@ def find_all_shortest_reaction_paths(
          edges=all_shortest_reaction_path_edges_df.to_dict("records"),
          directed=directed,
          vertex_name_attr="node",
-         edge_foreign_keys=("from", "to"),
+         edge_foreign_keys=(NAPISTU_GRAPH_EDGES.FROM, NAPISTU_GRAPH_EDGES.TO),
      )

      return (
          all_shortest_reaction_paths_df,
          all_shortest_reaction_path_edges_df,
-         edge_sources,
+         reaction_sources,
          paths_graph,
      )

@@ -387,9 +395,11 @@ def plot_shortest_paths(napistu_graph: NapistuGraph) -> NapistuGraph.plot:
      visual_style["vertex_label_size"] = 8
      visual_style["vertex_label_angle"] = 90
      visual_style["vertex_color"] = [
-         color_dict[x] for x in napistu_graph.vs["node_type"]
+         color_dict[x] for x in napistu_graph.vs[NAPISTU_GRAPH_VERTICES.NODE_TYPE]
+     ]
+     visual_style["edge_width"] = [
+         math.sqrt(x) for x in napistu_graph.es[NAPISTU_GRAPH_EDGES.WEIGHTS]
      ]
-     visual_style["edge_width"] = [math.sqrt(x) for x in napistu_graph.es["weights"]]
      visual_style["edge_color"] = "dimgray"
      visual_style["layout"] = paths_graph_layout
      visual_style["bbox"] = (2000, 2000)
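For orientation, here is a hedged sketch of unpacking the renamed third return value after this change. It mirrors the updated call exercised in tests/test_network_paths.py further below; the graph, model, and origin/destination table are passed in as parameters rather than constructed here:

```python
from napistu.network import paths
from napistu.network.constants import NAPISTU_GRAPH_EDGES


def shortest_paths_with_sources(napistu_graph, sbml_dfs, target_species_paths):
    """Sketch: find_all_shortest_reaction_paths now returns reaction_sources
    (formerly edge_sources) as the third element of its tuple."""
    (
        shortest_paths_df,
        shortest_path_edges_df,
        reaction_sources,
        paths_graph,
    ) = paths.find_all_shortest_reaction_paths(
        napistu_graph,
        sbml_dfs,
        target_species_paths,
        weight_var=NAPISTU_GRAPH_EDGES.WEIGHTS,
    )
    return shortest_paths_df, reaction_sources, paths_graph
```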
napistu-0.4.5.dist-info/METADATA → napistu-0.4.7.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: napistu
- Version: 0.4.5
+ Version: 0.4.7
  Summary: Connecting high-dimensional data to curated pathways
  Home-page: https://github.com/napistu/napistu-py
  Author: Sean Hackett
napistu-0.4.5.dist-info/RECORD → napistu-0.4.7.dist-info/RECORD
@@ -59,16 +59,16 @@ napistu/modify/gaps.py,sha256=CV-bdSfanhrnCIFVWfNuQJbtjvj4hsEwheKYR-Z3tNA,26844
  napistu/modify/pathwayannot.py,sha256=xuBSMDFWbg_d6-Gzv0Td3Q5nnFTa-Qzic48g1b1AZtQ,48081
  napistu/modify/uncompartmentalize.py,sha256=y5LkXn5x6u80dB_McfAIh88BxZGIAVFLujkP7sPNRh0,9690
  napistu/network/__init__.py,sha256=dFXAhIqlTLJMwowS4BUDT08-Vy3Q0u1L0CMCErSZT1Y,239
- napistu/network/constants.py,sha256=nG_lUZYLgop8oxOGjDYqvxXJzVdOwKZ3aWnxlhtSaIo,6915
+ napistu/network/constants.py,sha256=zQkBTeZ2_K_rId6IUvKKTv9chx_i6K5B8vPZ19-LkNQ,7782
  napistu/network/data_handling.py,sha256=KncrAKjXI3169BgVE-SnY8FkpVF60JnUwfMHtbqvsTc,14725
  napistu/network/ig_utils.py,sha256=MuyEyOVtSHndil6QuuRCimBZrJ2jTaF5qQESgYlu02M,17042
- napistu/network/neighborhoods.py,sha256=hi8FT5sGd1vtkR5Uu10wr0Ik5Z3fz9e5fhvXqfi7QPQ,57340
+ napistu/network/neighborhoods.py,sha256=RNqaz91MTxuU4jNfrGdEcnm6rCykgdtHDPu2abAE-Kg,65516
  napistu/network/net_create.py,sha256=66kV_xoWnu4BVLaJZ1TAC7wBSsjPDqjoAXH-X9ShV3s,59091
  napistu/network/net_create_utils.py,sha256=zajwaz2xAij_9fEnD77SgBw_EnNAnJ8jBCmmK2rk_bA,24672
  napistu/network/net_propagation.py,sha256=Il5nDOWh3nLz8gRhDFHGp2LxcvJ9C1twiSZjDeiZMUo,23490
  napistu/network/ng_core.py,sha256=dGnTUKR4WtnvaYMyIHqqF55FY4mJSa7wjA2LZ4cVB6U,11720
- napistu/network/ng_utils.py,sha256=DkI_Ln2uFiNDjPEnUnf7kyy6XwyqvpeUkk8DRjTGZQQ,16078
- napistu/network/paths.py,sha256=BcoYNkCplaM_QPqWWfiwD89bsvwlyvvacSiEzHacfmA,17863
+ napistu/network/ng_utils.py,sha256=LX9DzMnz0AQMhJGUh3r8bg4dyEgWs_tym1Olu1FwlbQ,16070
+ napistu/network/paths.py,sha256=ZnIqwBIsgz4C4TLyg3c_pCO5zZ97gmCNepDmq2QNEQc,18020
  napistu/network/precompute.py,sha256=ARU2tktWnxFISaHAY8chpkg8pusZPv7TT5jSIB9eFF0,10081
  napistu/ontologies/__init__.py,sha256=dFXAhIqlTLJMwowS4BUDT08-Vy3Q0u1L0CMCErSZT1Y,239
  napistu/ontologies/constants.py,sha256=GyOFvezSxDK1VigATcruTKtNhjcYaid1ggulEf_HEtQ,4345
@@ -87,7 +87,7 @@ napistu/scverse/loading.py,sha256=jqiE71XB-wdV50GyZrauFNY0Lai4bX9Fm2Gv80VR8t8,27
  napistu/statistics/__init__.py,sha256=dFXAhIqlTLJMwowS4BUDT08-Vy3Q0u1L0CMCErSZT1Y,239
  napistu/statistics/hypothesis_testing.py,sha256=k0mBFAMF0XHVcKwS26aPnEbq_FIUVwXU1gZ6cKfFbCk,2190
  napistu/statistics/quantiles.py,sha256=1-LnmVzC2CQWxCKUh0yi6YfKrbsZM1-kkD7nu2-aS5s,3042
- napistu-0.4.5.dist-info/licenses/LICENSE,sha256=kW8wVT__JWoHjl2BbbJDAZInWa9AxzJeR_uv6-i5x1g,1063
+ napistu-0.4.7.dist-info/licenses/LICENSE,sha256=kW8wVT__JWoHjl2BbbJDAZInWa9AxzJeR_uv6-i5x1g,1063
  tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/conftest.py,sha256=Rw0KtnVyykZhRjnlmNu4oV47lNIeYUJVHu4y47RnVq0,9990
  tests/test_consensus.py,sha256=Hzfrgp4SpkRDnEMVMD3f0UInSycndB8kKzC4wDDvRas,15076
@@ -109,14 +109,14 @@ tests/test_mcp_documentation_utils.py,sha256=OW0N2N_2IOktbYTcCWhhWz4bANi8IB60l1q
  tests/test_mcp_server.py,sha256=bP3PWVQsEfX6-lAgXKP32njdg__o65n2WuLvkxTTHkQ,11215
  tests/test_network_data_handling.py,sha256=4aS8z2AlKkVd-JhK4BQ8fjeiW8_bJ1hZ3cc71Jh7Glk,12716
  tests/test_network_ig_utils.py,sha256=XihmEpX890sr-LYmsb_t4aN0sKIDWCnXkTpDhpuTDmw,7199
- tests/test_network_neighborhoods.py,sha256=OvVfgGodbS3MpuSfj-__VKjBj99Ng4WWLmINlIIvbvo,5100
+ tests/test_network_neighborhoods.py,sha256=3k0d-Pk_rWtGwxTg-Jpjv3CsVSj4qMn71MEHEKcqHII,8746
  tests/test_network_net_create.py,sha256=L0U91b4jVHDuC3DFo-_BUFVuv4GuSxZuLAo7r-7EJxY,12877
  tests/test_network_net_create_utils.py,sha256=0J6KIh2HBc4koFsvwMaul1QRtj5x92kR9HBdDZajnAw,18971
  tests/test_network_net_propagation.py,sha256=kZeDHD93iMrLVvxO4OyfRH5_vgsYeQyC40OI9Dsb0xY,14999
  tests/test_network_ng_core.py,sha256=w-iNBTtenennJhaLFauk952pEsk7W0-Fa8lPvIRqHyY,628
  tests/test_network_ng_utils.py,sha256=QVVuRnvCRfTSIlGdwQTIF9lr0wOwoc5gGeXAUY_AdgE,713
- tests/test_network_paths.py,sha256=TWZnxY5bF3m6gahcxcYJGrBIawh2-_vUcec1LyPmXV8,1686
- tests/test_network_precompute.py,sha256=IPr1KhtxBD0fXx_2TvZqnevrD-Iig35otb8yloRFpRc,10014
+ tests/test_network_paths.py,sha256=Bx1uqyIAPw_i27s94dyjCQcV_04O9yovlrQgr1lFjS4,2143
+ tests/test_network_precompute.py,sha256=MhmT6LQF-JcMR9YR78W-cLHbkQnp8ro-MHJ5yvMVhJE,10409
  tests/test_ontologies_genodexito.py,sha256=6fINyUiubHZqu7qxye09DQfJXw28ZMAJc3clPb-cCoY,2298
  tests/test_ontologies_id_tables.py,sha256=CpwpbmQvTc1BaVd6jbDKHAVE2etwN0vx93nC8jpnMlE,7265
  tests/test_ontologies_mygene.py,sha256=VkdRcKIWmcG6V-2dpfvsBiOJN5dO-j0RqZNxtJRcyBU,1583
@@ -136,8 +136,8 @@ tests/test_uncompartmentalize.py,sha256=nAk5kfAVLU9a2VWe2x2HYVcKqj-EnwmwddERIPRa
  tests/test_utils.py,sha256=qPSpV-Q9b6vmdycgaDmQqtcvzKnAVnN9j5xJ9x-T6bg,23959
  tests/utils.py,sha256=SoWQ_5roJteFGcMaOeEiQ5ucwq3Z2Fa3AAs9iXHTsJY,749
  tests/test_data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- napistu-0.4.5.dist-info/METADATA,sha256=JJzjckSlzdusT7COjo-FxaNRBGMWtfJc-kfUDvjyvW4,4078
- napistu-0.4.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- napistu-0.4.5.dist-info/entry_points.txt,sha256=_QnaPOvJNA3IltxmZgWIiBoen-L1bPYX18YQfC7oJgQ,41
- napistu-0.4.5.dist-info/top_level.txt,sha256=Gpvk0a_PjrtqhYcQ9IDr3zR5LqpZ-uIHidQMIpjlvhY,14
- napistu-0.4.5.dist-info/RECORD,,
+ napistu-0.4.7.dist-info/METADATA,sha256=PgIDsBflFe6QmORKY6hfoEI9_Qqrpwa7Oc9126D47jc,4078
+ napistu-0.4.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ napistu-0.4.7.dist-info/entry_points.txt,sha256=_QnaPOvJNA3IltxmZgWIiBoen-L1bPYX18YQfC7oJgQ,41
+ napistu-0.4.7.dist-info/top_level.txt,sha256=Gpvk0a_PjrtqhYcQ9IDr3zR5LqpZ-uIHidQMIpjlvhY,14
+ napistu-0.4.7.dist-info/RECORD,,
tests/test_network_neighborhoods.py CHANGED
@@ -1,11 +1,17 @@
  import pandas as pd
+ import pytest

  from napistu.network import ng_utils
  from napistu.network import neighborhoods
  from napistu import source

  from napistu.constants import SBML_DFS
- from napistu.network.constants import NEIGHBORHOOD_NETWORK_TYPES
+ from napistu.network.constants import (
+     NAPISTU_GRAPH_VERTICES,
+     NAPISTU_GRAPH_NODE_TYPES,
+     NEIGHBORHOOD_DICT_KEYS,
+     NEIGHBORHOOD_NETWORK_TYPES,
+ )


  def test_neighborhood(sbml_dfs, napistu_graph):
@@ -22,7 +28,7 @@ def test_neighborhood(sbml_dfs, napistu_graph):
          order=3,
      )

-     assert neighborhood["species_73473"]["vertices"].shape[0] == 6
+     assert neighborhood["species_73473"][NEIGHBORHOOD_DICT_KEYS.VERTICES].shape[0] == 6


  def test_find_and_prune_neighborhoods_with_source_counts(
@@ -32,7 +38,7 @@ def test_find_and_prune_neighborhoods_with_source_counts(
      Test find_and_prune_neighborhoods function with source_total_counts parameter.

      This test verifies that the function works correctly when source_total_counts
-     is provided, which enables source-based edge assignment in neighborhoods.
+     is provided, which enables source-based reaction assignment in neighborhoods.
      """
      # Create source_total_counts using the source module
      source_total_counts = source.get_source_total_counts(
@@ -59,6 +65,7 @@
          sbml_dfs=sbml_dfs_metabolism,
          napistu_graph=napistu_graph_metabolism,
          compartmentalized_species=compartmentalized_species,
+         min_pw_size=1,
          source_total_counts=source_total_counts,
          network_type=NEIGHBORHOOD_NETWORK_TYPES.HOURGLASS,
          order=3,
@@ -73,34 +80,39 @@
      # Check each neighborhood has the expected structure
      for sc_id, neighborhood in neighborhoods_result.items():
          assert isinstance(neighborhood, dict)
-         assert "graph" in neighborhood
-         assert "vertices" in neighborhood
-         assert "edges" in neighborhood
-         assert "edge_sources" in neighborhood
+         assert NEIGHBORHOOD_DICT_KEYS.GRAPH in neighborhood
+         assert NEIGHBORHOOD_DICT_KEYS.VERTICES in neighborhood
+         assert NEIGHBORHOOD_DICT_KEYS.EDGES in neighborhood
+         assert NEIGHBORHOOD_DICT_KEYS.REACTION_SOURCES in neighborhood

-         # Verify edge_sources is populated when source_total_counts is provided
+         # Verify reaction_sources is populated when source_total_counts is provided
          # (this is the key difference when source_total_counts is passed)
-         if neighborhood["edges"].shape[0] > 0:
-             # If there are edges, edge_sources should be populated
-             assert neighborhood["edge_sources"] is not None
-             assert isinstance(neighborhood["edge_sources"], pd.DataFrame)
-
-             # Check edge_sources has expected columns
+         if neighborhood[NEIGHBORHOOD_DICT_KEYS.EDGES].shape[0] > 0:
+             # If there are edges, reaction_sources should be populated
+             assert neighborhood[NEIGHBORHOOD_DICT_KEYS.REACTION_SOURCES] is not None
+             assert isinstance(
+                 neighborhood[NEIGHBORHOOD_DICT_KEYS.REACTION_SOURCES], pd.DataFrame
+             )
+             assert neighborhood[NEIGHBORHOOD_DICT_KEYS.REACTION_SOURCES].shape[0] > 0
+
+             # Check reaction_sources has expected columns
              expected_columns = [SBML_DFS.R_ID, "pathway_id", "name"]
              for col in expected_columns:
-                 assert col in neighborhood["edge_sources"].columns
+                 assert (
+                     col in neighborhood[NEIGHBORHOOD_DICT_KEYS.REACTION_SOURCES].columns
+                 )

          # Verify vertices structure
-         vertices = neighborhood["vertices"]
+         vertices = neighborhood[NEIGHBORHOOD_DICT_KEYS.VERTICES]
          assert isinstance(vertices, pd.DataFrame)
          assert vertices.shape[0] > 0

          # Verify edges structure
-         edges = neighborhood["edges"]
+         edges = neighborhood[NEIGHBORHOOD_DICT_KEYS.EDGES]
          assert isinstance(edges, pd.DataFrame)

          # Verify graph structure
-         graph = neighborhood["graph"]
+         graph = neighborhood[NEIGHBORHOOD_DICT_KEYS.GRAPH]
          assert hasattr(graph, "vcount")
          assert hasattr(graph, "ecount")

@@ -110,6 +122,7 @@ def test_find_and_prune_neighborhoods_with_source_counts(
          napistu_graph=napistu_graph_metabolism,
          compartmentalized_species=compartmentalized_species,
          source_total_counts=None,  # No source counts
+         min_pw_size=1,
          network_type=NEIGHBORHOOD_NETWORK_TYPES.DOWNSTREAM,
          order=3,
          verbose=False,
@@ -119,13 +132,99 @@ def test_find_and_prune_neighborhoods_with_source_counts(
      # Verify both results have the same basic structure
      assert len(neighborhoods_result) == len(neighborhoods_result_no_source)

-     # The main difference should be in edge_sources handling
+     # The main difference should be in reaction_sources handling
      for sc_id in neighborhoods_result:
-         with_source = neighborhoods_result[sc_id]["edge_sources"]
-         without_source = neighborhoods_result_no_source[sc_id]["edge_sources"]
+         with_source = neighborhoods_result[sc_id][
+             NEIGHBORHOOD_DICT_KEYS.REACTION_SOURCES
+         ]
+         without_source = neighborhoods_result_no_source[sc_id][
+             NEIGHBORHOOD_DICT_KEYS.REACTION_SOURCES
+         ]

          # Both should either be None or DataFrames, but the content may differ
          assert (with_source is None) == (without_source is None)
          if with_source is not None and without_source is not None:
              assert isinstance(with_source, pd.DataFrame)
              assert isinstance(without_source, pd.DataFrame)
+
+     # Test error handling for invalid parameters
+     # Test invalid network_type
+     with pytest.raises(ValueError):
+         neighborhoods.find_and_prune_neighborhoods(
+             sbml_dfs=sbml_dfs_metabolism,
+             napistu_graph=napistu_graph_metabolism,
+             compartmentalized_species=compartmentalized_species,
+             source_total_counts=source_total_counts,
+             min_pw_size=1,
+             network_type="invalid_network_type",
+             order=3,
+             verbose=False,
+             top_n=10,
+         )
+
+     # Test invalid order (negative)
+     with pytest.raises(ValueError):
+         neighborhoods.find_and_prune_neighborhoods(
+             sbml_dfs=sbml_dfs_metabolism,
+             napistu_graph=napistu_graph_metabolism,
+             compartmentalized_species=compartmentalized_species,
+             source_total_counts=source_total_counts,
+             min_pw_size=1,
+             network_type=NEIGHBORHOOD_NETWORK_TYPES.HOURGLASS,
+             order=-1,
+             verbose=False,
+             top_n=10,
+         )
+
+
+ def test_add_vertices_uri_urls(sbml_dfs):
+     """
+     Test add_vertices_uri_urls function.
+
+     This test verifies that the function correctly adds URI URLs to vertices
+     DataFrame for both species and reactions.
+     """
+
+     # Get real species and reaction names from the sbml_dfs fixture
+     real_species = sbml_dfs.compartmentalized_species.index[0]  # Get first species
+     real_reaction = sbml_dfs.reactions.index[0]  # Get first reaction
+
+     # Create a test vertices DataFrame with real species and reactions
+     test_vertices = pd.DataFrame(
+         {
+             NAPISTU_GRAPH_VERTICES.NAME: [real_species, real_reaction],
+             NAPISTU_GRAPH_VERTICES.NODE_TYPE: [
+                 NAPISTU_GRAPH_NODE_TYPES.SPECIES,
+                 NAPISTU_GRAPH_NODE_TYPES.REACTION,
+             ],
+         }
+     )
+
+     # Test basic functionality
+     result = neighborhoods.add_vertices_uri_urls(test_vertices, sbml_dfs)
+
+     # Verify basic structure
+     assert isinstance(result, pd.DataFrame)
+     assert result.shape[0] == test_vertices.shape[0]  # Same number of rows
+     assert result.shape[1] >= test_vertices.shape[1]  # At least same number of columns
+
+     # Verify original columns are preserved
+     for col in test_vertices.columns:
+         assert col in result.columns
+         assert all(result[col] == test_vertices[col])
+
+     # Verify species vertices have s_id column
+     species_vertices = result[
+         result[NAPISTU_GRAPH_VERTICES.NODE_TYPE] == NAPISTU_GRAPH_NODE_TYPES.SPECIES
+     ]
+     assert SBML_DFS.S_ID in species_vertices.columns
+
+     # Test error handling
+     import pytest
+
+     # Test with empty DataFrame
+     empty_vertices = pd.DataFrame(
+         columns=[NAPISTU_GRAPH_VERTICES.NAME, NAPISTU_GRAPH_VERTICES.NODE_TYPE]
+     )
+     with pytest.raises(ValueError, match="vertices must have at least one row"):
+         neighborhoods.add_vertices_uri_urls(empty_vertices, sbml_dfs)
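The rewritten test above switches neighborhood dictionary access from bare strings to NEIGHBORHOOD_DICT_KEYS constants. As a compact sketch of that access pattern (assuming a `neighborhoods_result` dict of the shape returned by find_and_prune_neighborhoods, as exercised in the test):

```python
from napistu.network.constants import NEIGHBORHOOD_DICT_KEYS


def summarize_neighborhoods(neighborhoods_result: dict) -> dict:
    """Sketch: count vertices/edges per neighborhood using the constant keys exercised above."""
    summary = {}
    for sc_id, neighborhood in neighborhoods_result.items():
        summary[sc_id] = {
            "n_vertices": neighborhood[NEIGHBORHOOD_DICT_KEYS.VERTICES].shape[0],
            "n_edges": neighborhood[NEIGHBORHOOD_DICT_KEYS.EDGES].shape[0],
            "has_reaction_sources": (
                neighborhood[NEIGHBORHOOD_DICT_KEYS.REACTION_SOURCES] is not None
            ),
        }
    return summary
```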
tests/test_network_paths.py CHANGED
@@ -2,12 +2,17 @@ import pandas as pd

  from napistu.network import paths
  from napistu.network import ng_utils
+ from napistu.constants import SBML_DFS
+ from napistu.network.constants import (
+     NAPISTU_GRAPH_EDGES,
+     NET_POLARITY,
+ )


  def test_shortest_paths(sbml_dfs, napistu_graph, napistu_graph_undirected):
      species = sbml_dfs.species
-     source_species = species[species["s_name"] == "NADH"]
-     dest_species = species[species["s_name"] == "NAD+"]
+     source_species = species[species[SBML_DFS.S_NAME] == "NADH"]
+     dest_species = species[species[SBML_DFS.S_NAME] == "NAD+"]
      target_species_paths = ng_utils.compartmentalize_species_pairs(
          sbml_dfs, source_species.index.tolist(), dest_species.index.tolist()
      )
@@ -18,17 +23,23 @@ def test_shortest_paths(sbml_dfs, napistu_graph, napistu_graph_undirected):
          _,
          _,
      ) = paths.find_all_shortest_reaction_paths(
-         napistu_graph, sbml_dfs, target_species_paths, weight_var="weights"
+         napistu_graph,
+         sbml_dfs,
+         target_species_paths,
+         weight_var=NAPISTU_GRAPH_EDGES.WEIGHTS,
      )

      # undirected graph
      (
          all_shortest_reaction_paths_df,
-         all_shortest_reaction_path_edges_df,
-         edge_sources,
-         paths_graph,
+         _,
+         _,
+         _,
      ) = paths.find_all_shortest_reaction_paths(
-         napistu_graph_undirected, sbml_dfs, target_species_paths, weight_var="weights"
+         napistu_graph_undirected,
+         sbml_dfs,
+         target_species_paths,
+         weight_var=NAPISTU_GRAPH_EDGES.WEIGHTS,
      )

      assert all_shortest_reaction_paths_df.shape[0] == 3
@@ -36,21 +47,34 @@ def test_shortest_paths(sbml_dfs, napistu_graph, napistu_graph_undirected):

  def test_net_polarity():
      polarity_series = pd.Series(
-         ["ambiguous", "ambiguous"], index=[0, 1], name="link_polarity"
+         [NET_POLARITY.AMBIGUOUS, NET_POLARITY.AMBIGUOUS],
+         index=[0, 1],
+         name=NET_POLARITY.LINK_POLARITY,
      )
      assert all(
-         [x == "ambiguous" for x in paths._calculate_net_polarity(polarity_series)]
+         [
+             x == NET_POLARITY.AMBIGUOUS
+             for x in paths._calculate_net_polarity(polarity_series)
+         ]
      )

      polarity_series = pd.Series(
-         ["activation", "inhibition", "inhibition", "ambiguous"],
+         [
+             NET_POLARITY.ACTIVATION,
+             NET_POLARITY.INHIBITION,
+             NET_POLARITY.INHIBITION,
+             NET_POLARITY.AMBIGUOUS,
+         ],
          index=range(0, 4),
-         name="link_polarity",
+         name=NET_POLARITY.LINK_POLARITY,
      )
      assert paths._calculate_net_polarity(polarity_series) == [
-         "activation",
-         "inhibition",
-         "activation",
-         "ambiguous activation",
+         NET_POLARITY.ACTIVATION,
+         NET_POLARITY.INHIBITION,
+         NET_POLARITY.ACTIVATION,
+         NET_POLARITY.AMBIGUOUS_ACTIVATION,
      ]
-     assert paths._terminal_net_polarity(polarity_series) == "ambiguous activation"
+     assert (
+         paths._terminal_net_polarity(polarity_series)
+         == NET_POLARITY.AMBIGUOUS_ACTIVATION
+     )
tests/test_network_precompute.py CHANGED
@@ -14,6 +14,12 @@ from napistu.network import net_create
  from napistu.network import paths
  from napistu.network import precompute

+ from napistu.network.constants import (
+     NAPISTU_GRAPH_VERTICES,
+     DISTANCES,
+     NEIGHBORHOOD_NETWORK_TYPES,
+ )
+
  test_path = os.path.abspath(os.path.join(__file__, os.pardir))
  sbml_path = os.path.join(test_path, "test_data", "reactome_glucose_metabolism.sbml")
  if not os.path.isfile(sbml_path):
@@ -138,6 +144,7 @@

  def test_precomputed_distances_neighborhoods():
+
      compartmentalized_species = sbml_dfs.compartmentalized_species[
          sbml_dfs.compartmentalized_species["s_id"] == "S00000000"
      ].index.tolist()
@@ -169,8 +176,12 @@
      pruned_vert_otf = pruned_neighborhoods_otf[key]["vertices"]
      pruned_vert_precomp = pruned_neighborhoods_precomputed[key]["vertices"]

-     join_key = ["name", "node_name", "node_orientation"]
-     join_key_w_vars = [*join_key, *["path_weight", "path_length"]]
+     join_key = [
+         NAPISTU_GRAPH_VERTICES.NAME,
+         NAPISTU_GRAPH_VERTICES.NODE_NAME,
+         "node_orientation",
+     ]
+     join_key_w_vars = [*join_key, *[DISTANCES.PATH_WEIGHTS, DISTANCES.PATH_LENGTH]]
      neighbor_comparison = (
          pruned_vert_precomp[join_key_w_vars]
          .assign(in_precompute=True)
@@ -197,23 +208,27 @@
      # which should be the same if we are pre-selecting the correct neighbors
      # as part of _precompute_neighbors()
      downstream_disagreement_w_precompute = (
-         comparison_df[comparison_df["node_orientation"] == "downstream"]
+         comparison_df[
+             comparison_df["node_orientation"] == NEIGHBORHOOD_NETWORK_TYPES.DOWNSTREAM
+         ]
          .merge(
              precomputed_distances,
-             left_on=["focal_sc_id", "name"],
-             right_on=["sc_id_origin", "sc_id_dest"],
+             left_on=["focal_sc_id", NAPISTU_GRAPH_VERTICES.NAME],
+             right_on=[DISTANCES.SC_ID_ORIGIN, DISTANCES.SC_ID_DEST],
          )
-         .query("abs(path_weight_x - path_weights) > 1e-13")
+         .query("abs(path_weights_x - path_weights) > 1e-13")
      )

      upstream_disagreement_w_precompute = (
-         comparison_df[comparison_df["node_orientation"] == "upstream"]
+         comparison_df[
+             comparison_df["node_orientation"] == NEIGHBORHOOD_NETWORK_TYPES.UPSTREAM
+         ]
          .merge(
              precomputed_distances,
-             left_on=["focal_sc_id", "name"],
-             right_on=["sc_id_dest", "sc_id_origin"],
+             left_on=["focal_sc_id", NAPISTU_GRAPH_VERTICES.NAME],
+             right_on=[DISTANCES.SC_ID_DEST, DISTANCES.SC_ID_ORIGIN],
          )
-         .query("abs(path_weight_x - path_upstream_weights) > 1e-13")
+         .query("abs(path_weights_x - path_upstream_weights) > 1e-13")
      )

      assert downstream_disagreement_w_precompute.shape[0] == 0
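The precompute test changes above switch to DISTANCES column constants (SC_ID_ORIGIN, SC_ID_DEST, PATH_WEIGHTS, PATH_LENGTH) and the path_weights spelling. A short sketch of that join, with the two frames passed in as parameters since in the test they come from fixtures:

```python
import pandas as pd

from napistu.network.constants import DISTANCES, NAPISTU_GRAPH_VERTICES


def join_to_precomputed(
    comparison_df: pd.DataFrame, precomputed_distances: pd.DataFrame
) -> pd.DataFrame:
    """Sketch mirroring the downstream merge above: neighborhood vertices are
    matched to precomputed distances on origin/destination species ids."""
    return comparison_df.merge(
        precomputed_distances,
        left_on=["focal_sc_id", NAPISTU_GRAPH_VERTICES.NAME],
        right_on=[DISTANCES.SC_ID_ORIGIN, DISTANCES.SC_ID_DEST],
    )
```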