napistu 0.1.0__py3-none-any.whl → 0.2.4.dev2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. napistu/__init__.py +1 -1
  2. napistu/consensus.py +1010 -513
  3. napistu/constants.py +24 -0
  4. napistu/gcs/constants.py +2 -2
  5. napistu/gcs/downloads.py +57 -25
  6. napistu/gcs/utils.py +21 -0
  7. napistu/identifiers.py +105 -6
  8. napistu/ingestion/constants.py +0 -1
  9. napistu/ingestion/obo.py +24 -8
  10. napistu/ingestion/psi_mi.py +20 -5
  11. napistu/ingestion/reactome.py +8 -32
  12. napistu/mcp/__init__.py +69 -0
  13. napistu/mcp/__main__.py +180 -0
  14. napistu/mcp/codebase.py +182 -0
  15. napistu/mcp/codebase_utils.py +298 -0
  16. napistu/mcp/constants.py +72 -0
  17. napistu/mcp/documentation.py +166 -0
  18. napistu/mcp/documentation_utils.py +235 -0
  19. napistu/mcp/execution.py +382 -0
  20. napistu/mcp/profiles.py +73 -0
  21. napistu/mcp/server.py +86 -0
  22. napistu/mcp/tutorials.py +124 -0
  23. napistu/mcp/tutorials_utils.py +230 -0
  24. napistu/mcp/utils.py +47 -0
  25. napistu/mechanism_matching.py +782 -26
  26. napistu/modify/constants.py +41 -0
  27. napistu/modify/curation.py +4 -1
  28. napistu/modify/gaps.py +243 -156
  29. napistu/modify/pathwayannot.py +26 -8
  30. napistu/network/neighborhoods.py +16 -7
  31. napistu/network/net_create.py +209 -54
  32. napistu/network/net_propagation.py +118 -0
  33. napistu/network/net_utils.py +1 -32
  34. napistu/rpy2/netcontextr.py +10 -7
  35. napistu/rpy2/rids.py +7 -5
  36. napistu/sbml_dfs_core.py +46 -29
  37. napistu/sbml_dfs_utils.py +37 -1
  38. napistu/source.py +8 -2
  39. napistu/utils.py +67 -8
  40. napistu-0.2.4.dev2.dist-info/METADATA +84 -0
  41. napistu-0.2.4.dev2.dist-info/RECORD +95 -0
  42. {napistu-0.1.0.dist-info → napistu-0.2.4.dev2.dist-info}/WHEEL +1 -1
  43. tests/conftest.py +11 -5
  44. tests/test_consensus.py +4 -1
  45. tests/test_gaps.py +127 -0
  46. tests/test_gcs.py +3 -2
  47. tests/test_igraph.py +14 -0
  48. tests/test_mcp_documentation_utils.py +13 -0
  49. tests/test_mechanism_matching.py +658 -0
  50. tests/test_net_propagation.py +89 -0
  51. tests/test_net_utils.py +83 -0
  52. tests/test_sbml.py +2 -0
  53. tests/{test_sbml_dfs_create.py → test_sbml_dfs_core.py} +68 -4
  54. tests/test_utils.py +81 -0
  55. napistu-0.1.0.dist-info/METADATA +0 -56
  56. napistu-0.1.0.dist-info/RECORD +0 -77
  57. {napistu-0.1.0.dist-info → napistu-0.2.4.dev2.dist-info}/entry_points.txt +0 -0
  58. {napistu-0.1.0.dist-info → napistu-0.2.4.dev2.dist-info}/licenses/LICENSE +0 -0
  59. {napistu-0.1.0.dist-info → napistu-0.2.4.dev2.dist-info}/top_level.txt +0 -0
tests/test_net_propagation.py ADDED
@@ -0,0 +1,89 @@
+ import pytest
+ import igraph as ig
+ import numpy as np
+ from napistu.network.net_propagation import personalized_pagerank_by_attribute
+
+
+ def test_personalized_pagerank_by_attribute_basic():
+     g = ig.Graph.Full(3)
+     g.vs["name"] = ["A", "B", "C"]
+     g.vs["score"] = [1, 0, 2]
+     df = personalized_pagerank_by_attribute(g, "score")
+     assert set(df.columns) == {
+         "name",
+         "pagerank_by_attribute",
+         "pagerank_uniform",
+         "score",
+     }
+     assert np.isclose(df["score"].sum(), 3)
+     assert np.isclose(df["pagerank_by_attribute"].sum(), 1)
+     assert np.isclose(df["pagerank_uniform"].sum(), 1)
+     # Uniform should only include A and C
+     assert df.loc[df["name"] == "B", "pagerank_uniform"].values[0] > 0
+
+
+ def test_personalized_pagerank_by_attribute_no_uniform():
+     g = ig.Graph.Full(3)
+     g.vs["score"] = [1, 0, 2]
+     df = personalized_pagerank_by_attribute(g, "score", calculate_uniform_dist=False)
+     assert "pagerank_uniform" not in df.columns
+     assert np.isclose(df["pagerank_by_attribute"].sum(), 1)
+
+
+ def test_personalized_pagerank_by_attribute_missing_and_negative():
+     g = ig.Graph.Full(3)
+     g.vs["score"] = [1, None, 2]
+     # None should be treated as 0
+     df = personalized_pagerank_by_attribute(g, "score")
+     assert np.isclose(df["score"].sum(), 3)
+     # Negative values should raise
+     g.vs["score"] = [1, -1, 2]
+     with pytest.raises(ValueError):
+         personalized_pagerank_by_attribute(g, "score")
+
+
+ def test_personalized_pagerank_by_attribute_additional_args_directed():
+     # create an asymmetric directed graph to test whether additional_propagation_args is respected
+     g = ig.Graph(directed=True)
+     g.add_vertices(3)
+     g.add_edges([(0, 1), (1, 2)])
+     g.vs["score"] = [1, 0, 2]
+     # Run with directed=False, which should treat the graph as undirected
+     df_directed = personalized_pagerank_by_attribute(
+         g, "score", additional_propagation_args={"directed": True}
+     )
+     df_undirected = personalized_pagerank_by_attribute(
+         g, "score", additional_propagation_args={"directed": False}
+     )
+     # The results should differ for directed vs undirected
+     assert not np.allclose(
+         df_directed["pagerank_by_attribute"], df_undirected["pagerank_by_attribute"]
+     )
+     # Uniform should also be affected
+     assert not np.allclose(
+         df_directed["pagerank_uniform"], df_undirected["pagerank_uniform"]
+     )
+
+
+ def test_personalized_pagerank_by_attribute_additional_args_invalid():
+     g = ig.Graph.Full(3)
+     g.vs["score"] = [1, 0, 2]
+     # Passing an invalid argument should raise ValueError
+     with pytest.raises(ValueError):
+         personalized_pagerank_by_attribute(
+             g, "score", additional_propagation_args={"not_a_real_arg": 123}
+         )
+
+
+ def test_personalized_pagerank_by_attribute_all_missing():
+     g = ig.Graph.Full(3)
+     # No 'score' attribute at all
+     with pytest.raises(ValueError, match="missing for all vertices"):
+         personalized_pagerank_by_attribute(g, "score")
+
+
+ def test_personalized_pagerank_by_attribute_all_zero():
+     g = ig.Graph.Full(3)
+     g.vs["score"] = [0, 0, 0]
+     with pytest.raises(ValueError, match="zero for all vertices"):
+         personalized_pagerank_by_attribute(g, "score")
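Taken together, these new tests pin down the interface of `personalized_pagerank_by_attribute`: it reads a non-negative vertex attribute, uses it as the personalization vector, and returns a DataFrame containing the attribute plus attribute-seeded (and, by default, uniform-seeded) PageRank scores. A minimal usage sketch, assuming only the behaviour exercised above, might look like:

```python
import igraph as ig

from napistu.network.net_propagation import personalized_pagerank_by_attribute

# Toy graph with a per-vertex seed weight; None/0 vertices get no personalization mass
g = ig.Graph.Full(4)
g.vs["name"] = ["A", "B", "C", "D"]
g.vs["score"] = [2.0, 0.0, 1.0, 0.0]

# DataFrame with columns: name, score, pagerank_by_attribute, pagerank_uniform
ppr = personalized_pagerank_by_attribute(g, "score")

# Extra keyword arguments are forwarded to the underlying PageRank call
ppr_undirected = personalized_pagerank_by_attribute(
    g, "score", additional_propagation_args={"directed": False}
)
```

Based on the comments in the tests, `pagerank_uniform` appears to act as a null model: the same walk seeded uniformly over the vertices that carry a non-zero attribute.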
tests/test_net_utils.py CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
  import pytest

  import igraph as ig
+ import numpy as np
  import pandas as pd
  from napistu.network import net_utils
  from napistu.network import net_create
@@ -56,6 +57,85 @@ def test_validate_graph_attributes(sbml_dfs):
      net_utils._validate_vertex_attributes(cpr_graph, "baz")


+ def test_pluck_entity_data_species_identity(sbml_dfs):
+     # Take first 10 species IDs
+     species_ids = sbml_dfs.species.index[:10]
+     # Create mock data with explicit dtype to ensure cross-platform consistency
+     # Fix for issue-42: Use explicit dtypes to avoid platform-specific dtype differences
+     # between Windows (int32) and macOS/Linux (int64)
+     mock_df = pd.DataFrame(
+         {
+             "string_col": [f"str_{i}" for i in range(10)],
+             "mixed_col": np.arange(-5, 5, dtype=np.int64),  # Explicitly use int64
+             "ones_col": np.ones(10, dtype=np.float64),  # Explicitly use float64
+             "squared_col": np.arange(10, dtype=np.int64),  # Explicitly use int64
+         },
+         index=species_ids,
+     )
+     # Assign to species_data
+     sbml_dfs.species_data["mock_table"] = mock_df
+
+     # Custom transformation: square
+     def square(x):
+         return x**2
+
+     custom_transformations = {"square": square}
+     # Create graph_attrs for species
+     graph_attrs = {
+         "species": {
+             "string_col": {
+                 "table": "mock_table",
+                 "variable": "string_col",
+                 "trans": "identity",
+             },
+             "mixed_col": {
+                 "table": "mock_table",
+                 "variable": "mixed_col",
+                 "trans": "identity",
+             },
+             "ones_col": {
+                 "table": "mock_table",
+                 "variable": "ones_col",
+                 "trans": "identity",
+             },
+             "squared_col": {
+                 "table": "mock_table",
+                 "variable": "squared_col",
+                 "trans": "square",
+             },
+         }
+     }
+     # Call pluck_entity_data with custom transformation
+     result = net_create.pluck_entity_data(
+         sbml_dfs, graph_attrs, "species", custom_transformations=custom_transformations
+     )
+     # Check output
+     assert isinstance(result, pd.DataFrame)
+     assert set(result.columns) == {"string_col", "mixed_col", "ones_col", "squared_col"}
+     assert list(result.index) == list(species_ids)
+     # Check values
+     pd.testing.assert_series_equal(result["string_col"], mock_df["string_col"])
+     pd.testing.assert_series_equal(result["mixed_col"], mock_df["mixed_col"])
+     pd.testing.assert_series_equal(result["ones_col"], mock_df["ones_col"])
+     pd.testing.assert_series_equal(
+         result["squared_col"], mock_df["squared_col"].apply(square)
+     )
+
+
+ def test_pluck_entity_data_missing_species_key(sbml_dfs):
+     # graph_attrs does not contain 'species' key
+     graph_attrs = {}
+     result = net_create.pluck_entity_data(sbml_dfs, graph_attrs, "species")
+     assert result is None
+
+
+ def test_pluck_entity_data_empty_species_dict(sbml_dfs):
+     # graph_attrs contains 'species' key but value is empty dict
+     graph_attrs = {"species": {}}
+     result = net_create.pluck_entity_data(sbml_dfs, graph_attrs, "species")
+     assert result is None
+
+
  ################################################
  # __main__
  ################################################
@@ -64,3 +144,6 @@ if __name__ == "__main__":
      test_safe_fill()
      test_cpr_graph_to_pandas_dfs()
      test_validate_graph_attributes()
+     test_pluck_entity_data_species_identity()
+     test_pluck_entity_data_missing_species_key()
+     test_pluck_entity_data_empty_species_dict()
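These tests also document the shape of the `graph_attrs` mapping that `net_create.pluck_entity_data` expects: each output column names a `species_data` table, a variable within it, and a transformation ("identity" or a key from `custom_transformations`), and the call returns `None` when no non-empty "species" entry is present. A small sketch under those assumptions (the `"mock_table"` name is hypothetical, mirroring the test fixture):

```python
import pandas as pd

from napistu.network import net_create


def species_attribute_frame(sbml_dfs) -> pd.DataFrame | None:
    """Sketch: pull one column out of sbml_dfs.species_data["mock_table"]."""
    graph_attrs = {
        "species": {
            "string_col": {
                "table": "mock_table",  # species_data table to read from
                "variable": "string_col",  # column within that table
                "trans": "identity",  # or a key registered via custom_transformations
            },
        }
    }
    # DataFrame indexed by species id, or None if the "species" entry is absent/empty
    return net_create.pluck_entity_data(sbml_dfs, graph_attrs, "species")
```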
tests/test_sbml.py CHANGED
@@ -1,6 +1,7 @@
  from __future__ import annotations

  import os
+ import pytest

  import pandas as pd
  from napistu import sbml_dfs_core
@@ -19,6 +20,7 @@ def test_sbml_dfs(sbml_path):
      assert type(dfs.get_identifiers("species")) is pd.DataFrame


+ @pytest.mark.skip_on_windows
  def test_adding_sbml_annotations(sbml_model):
      annotations = pd.DataFrame(
          [
tests/{test_sbml_dfs_create.py → test_sbml_dfs_core.py} CHANGED
@@ -2,12 +2,17 @@ from __future__ import annotations

  import os

+ import numpy as np
  import pandas as pd
  import pytest
  from napistu import sbml_dfs_core
  from napistu.ingestion import sbml
  from napistu.modify import pathwayannot

+ from napistu import identifiers as napistu_identifiers
+ from napistu.constants import SBML_DFS
+ from napistu.sbml_dfs_core import SBML_dfs
+

  def test_drop_cofactors(sbml_dfs):
      starting_rscs = sbml_dfs.reaction_species.shape[0]
@@ -238,7 +243,7 @@
  def test_stubbed_compartment():
      compartment = sbml_dfs_core._stub_compartments()

-     assert compartment["c_Identifiers"][0].ids[0] == {
+     assert compartment["c_Identifiers"].iloc[0].ids[0] == {
          "ontology": "go",
          "identifier": "GO:0005575",
          "url": "https://www.ebi.ac.uk/QuickGO/term/GO:0005575",
@@ -255,7 +260,7 @@
      sbml_dfs.get_table("foo", {"id"})

      # bad type
-     with pytest.raises(AssertionError):
+     with pytest.raises(TypeError):
          sbml_dfs.get_table("reaction_species", "id")

      # reaction species don't have ids
@@ -303,5 +308,64 @@ def test_species_status(sbml_dfs):
  )


- # if __name__ == "__main__":
- # test_get_table()
+ def test_get_identifiers_handles_missing_values():
+
+     # Minimal DataFrame with all types
+     df = pd.DataFrame(
+         {
+             SBML_DFS.S_NAME: ["A", "B", "C", "D"],
+             SBML_DFS.S_IDENTIFIERS: [
+                 napistu_identifiers.Identifiers([]),
+                 None,
+                 np.nan,
+                 pd.NA,
+             ],
+             SBML_DFS.S_SOURCE: [None, None, None, None],
+         },
+         index=["s1", "s2", "s3", "s4"],
+     )
+     df.index.name = SBML_DFS.S_ID
+
+     sbml_dict = {
+         SBML_DFS.COMPARTMENTS: pd.DataFrame(
+             {
+                 SBML_DFS.C_NAME: ["cytosol"],
+                 SBML_DFS.C_IDENTIFIERS: [None],
+                 SBML_DFS.C_SOURCE: [None],
+             },
+             index=["c1"],
+         ),
+         SBML_DFS.SPECIES: df,
+         SBML_DFS.COMPARTMENTALIZED_SPECIES: pd.DataFrame(
+             {
+                 SBML_DFS.SC_NAME: ["A [cytosol]"],
+                 SBML_DFS.S_ID: ["s1"],
+                 SBML_DFS.C_ID: ["c1"],
+                 SBML_DFS.SC_SOURCE: [None],
+             },
+             index=["sc1"],
+         ),
+         SBML_DFS.REACTIONS: pd.DataFrame(
+             {
+                 SBML_DFS.R_NAME: [],
+                 SBML_DFS.R_IDENTIFIERS: [],
+                 SBML_DFS.R_SOURCE: [],
+                 SBML_DFS.R_ISREVERSIBLE: [],
+             },
+             index=[],
+         ),
+         SBML_DFS.REACTION_SPECIES: pd.DataFrame(
+             {
+                 SBML_DFS.R_ID: [],
+                 SBML_DFS.SC_ID: [],
+                 SBML_DFS.STOICHIOMETRY: [],
+                 SBML_DFS.SBO_TERM: [],
+             },
+             index=[],
+         ),
+     }
+     sbml = SBML_dfs(sbml_dict, validate=False)
+     result = sbml.get_identifiers(SBML_DFS.SPECIES)
+     assert result.shape[0] == 0 or all(
+         result[SBML_DFS.S_ID] == "s1"
+     ), "Only Identifiers objects should be returned."
tests/test_utils.py CHANGED
@@ -16,6 +16,8 @@ from google.cloud import storage
  from pytest import fixture
  from testcontainers.core.container import DockerContainer

+ from napistu.utils import drop_extra_cols
+

  @fixture(scope="session")
  def gcs_storage():
@@ -485,3 +487,82 @@ def test_click_str_to_list():
      assert utils.click_str_to_list("['foo', bar]") == ["foo", "bar"]
      with pytest.raises(ValueError):
          utils.click_str_to_list("foo")
+
+
+ def test_drop_extra_cols():
+     """Test the _drop_extra_cols function for removing and reordering columns."""
+     # Setup test DataFrames
+     df_in = pd.DataFrame({"col1": [1, 2, 3], "col2": [4, 5, 6], "col3": [7, 8, 9]})
+
+     df_out = pd.DataFrame(
+         {
+             "col2": [10, 11, 12],
+             "col3": [13, 14, 15],
+             "col4": [16, 17, 18],  # Extra column that should be dropped
+             "col1": [19, 20, 21],  # Different order than df_in
+         }
+     )
+
+     # Test basic functionality without always_include
+     result = drop_extra_cols(df_in, df_out)
+
+     # Check that extra column was dropped
+     assert "col4" not in result.columns
+
+     # Check that columns are in the same order as df_in
+     assert list(result.columns) == list(df_in.columns)
+
+     # Check that values are preserved
+     pd.testing.assert_frame_equal(
+         result,
+         pd.DataFrame(
+             {"col1": [19, 20, 21], "col2": [10, 11, 12], "col3": [13, 14, 15]}
+         )[
+             list(df_in.columns)
+         ],  # Ensure same column order
+     )
+
+     # Test with always_include
+     result_with_include = drop_extra_cols(df_in, df_out, always_include=["col4"])
+
+     # Check that col4 is retained and appears at the end
+     assert list(result_with_include.columns) == list(df_in.columns) + ["col4"]
+     assert result_with_include["col4"].equals(df_out["col4"])
+
+     # Test with always_include containing non-existent column
+     result_non_existent = drop_extra_cols(
+         df_in, df_out, always_include=["col4", "col5"]
+     )
+     assert list(result_non_existent.columns) == list(df_in.columns) + ["col4"]
+
+     # Test with always_include containing column from df_in
+     result_overlap = drop_extra_cols(df_in, df_out, always_include=["col1", "col4"])
+     assert list(result_overlap.columns) == list(df_in.columns) + ["col4"]
+
+     # Test with no overlapping columns but some in always_include
+     df_out_no_overlap = pd.DataFrame({"col4": [1, 2, 3], "col5": [4, 5, 6]})
+     result_no_overlap = drop_extra_cols(df_in, df_out_no_overlap)
+     assert result_no_overlap.empty
+     assert list(result_no_overlap.columns) == []
+
+     result_no_overlap_with_include = drop_extra_cols(
+         df_in, df_out_no_overlap, always_include=["col4"]
+     )
+     assert list(result_no_overlap_with_include.columns) == ["col4"]
+     assert result_no_overlap_with_include["col4"].equals(df_out_no_overlap["col4"])
+
+     # Test with subset of columns
+     df_out_subset = pd.DataFrame(
+         {"col1": [1, 2, 3], "col3": [7, 8, 9], "col4": [10, 11, 12]}
+     )
+     result_subset = drop_extra_cols(df_in, df_out_subset)
+     assert list(result_subset.columns) == ["col1", "col3"]
+     pd.testing.assert_frame_equal(result_subset, df_out_subset[["col1", "col3"]])
+
+     result_subset_with_include = drop_extra_cols(
+         df_in, df_out_subset, always_include=["col4"]
+     )
+     assert list(result_subset_with_include.columns) == ["col1", "col3", "col4"]
+     pd.testing.assert_frame_equal(
+         result_subset_with_include, df_out_subset[["col1", "col3", "col4"]]
+     )
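For orientation, the behaviour this test pins down for the new `drop_extra_cols` utility is: keep only the columns `df_out` shares with `df_in`, restore `df_in`'s column order, and append any `always_include` columns present in `df_out` at the end. A minimal sketch, assuming only what the assertions above establish:

```python
import pandas as pd

from napistu.utils import drop_extra_cols

df_in = pd.DataFrame({"col1": [1], "col2": [2], "col3": [3]})
df_out = pd.DataFrame({"col3": [30], "col4": [40], "col1": [10]})

# Keeps the shared columns in df_in's order -> ["col1", "col3"]
trimmed = drop_extra_cols(df_in, df_out)

# always_include appends col4 at the end -> ["col1", "col3", "col4"]
trimmed_plus = drop_extra_cols(df_in, df_out, always_include=["col4"])

print(list(trimmed.columns), list(trimmed_plus.columns))
```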
napistu-0.1.0.dist-info/METADATA DELETED
@@ -1,56 +0,0 @@
- Metadata-Version: 2.4
- Name: napistu
- Version: 0.1.0
- Summary: Connecting high-dimensional data to curated pathways
- Home-page: https://github.com/napistu/napistu-py
- Author: Sean Hackett
- Author-email: seanmchackett@gmail.com
- Project-URL: Bug Tracker, https://github.com/napistu/napistu-py/issues
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Operating System :: OS Independent
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3 :: Only
- Requires-Python: >=3.11
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Requires-Dist: Jinja2
- Requires-Dist: PyYAML==6.*
- Requires-Dist: click==8.*
- Requires-Dist: click-logging
- Requires-Dist: fs==2.4.*
- Requires-Dist: fs-gcsfs==1.5.*
- Requires-Dist: igraph
- Requires-Dist: matplotlib==3.*
- Requires-Dist: numpy==1.26.*
- Requires-Dist: pandas==1.5.*
- Requires-Dist: pydantic==2.*
- Requires-Dist: python-libsbml
- Requires-Dist: requests>=2
- Requires-Dist: scipy==1.14.*
- Requires-Dist: tqdm
- Requires-Dist: zeep==3.*
- Provides-Extra: dev
- Requires-Dist: black==25.*; extra == "dev"
- Requires-Dist: ipykernel; extra == "dev"
- Requires-Dist: pre-commit==3.3.*; extra == "dev"
- Requires-Dist: pytest==7.*; extra == "dev"
- Requires-Dist: pytest-cov; extra == "dev"
- Requires-Dist: ruff; extra == "dev"
- Requires-Dist: testcontainers; extra == "dev"
- Provides-Extra: rpy2
- Requires-Dist: pyarrow==18.0.0; extra == "rpy2"
- Requires-Dist: rpy2==3.5.*; extra == "rpy2"
- Requires-Dist: rpy2-arrow==0.1.1; extra == "rpy2"
- Dynamic: license-file
-
- # Napistu Python Library
-
- This Python package hosts the majority of the algorithmic code for the [Napistu project](https://github.com/napistu/napistu).
-
- ## Setup
-
- Currently the only way to use this repository is to clone the repo and perform a local install. e.g., from this directory:
-
- ```bash
- pip install .
- ```
napistu-0.1.0.dist-info/RECORD DELETED
@@ -1,77 +0,0 @@
- napistu/__init__.py,sha256=rz6NdV9Fm6a6bBR17VQPHeJQD4DUZWC7zR9a7nNMOhw,269
- napistu/__main__.py,sha256=i1OyReHD58GjyGYShXmMuBfA0VoGBF9dirg2nA4JCa8,28334
- napistu/consensus.py,sha256=p8GjWFzq1cvBB-H-LVSatWL_9fxbdYq2wsF4-JZnc_M,54641
- napistu/constants.py,sha256=hQ1OLH07xFTxMukJLCptzqqHk22vgrByej8lvMb2qbc,14702
- napistu/identifiers.py,sha256=wque0qsMZK2AMsAhkF1ERSMrEF7h6b5SMp3iqVu8e1o,28796
- napistu/indices.py,sha256=UeJjjsYs0sGvZIKz1y4ZQ6aUkABn-6TCUDZ2VCVT9JI,7534
- napistu/mechanism_matching.py,sha256=CPhtM6GERmGlBK8zH1cEvSpsasa0mG7ojLKDOze3dyE,21704
- napistu/sbml_dfs_core.py,sha256=iSng-3cpJVpVMb340YGM6s0pFBTA9SyYCdYvHdZRSMA,79387
- napistu/sbml_dfs_utils.py,sha256=j6Bu3acqOFSEbyVzASXhlnV8hQvi4k-vdMYzVMPzz5A,10318
- napistu/source.py,sha256=oBgw2OZLVBETQG8Mwoc5ZUe-6cg_Yt6Mxsto3fCdw1k,13386
- napistu/utils.py,sha256=G4IfG_WBSxkn5RBdPPn9sAkzrz2BdKgXlFFmMsB9wsA,28038
- napistu/gcs/__init__.py,sha256=1kqmRHlEyI7VpILzurZb1URwC_UIc1PTMEBHQnjXW6s,246
- napistu/gcs/constants.py,sha256=rc-oQBh6pdu7cjqTCerHG_fDub-FQcEjzWh2ic715cs,2844
- napistu/gcs/downloads.py,sha256=EiOxLW1MMexdPTSiakWknTB-BGY1y__s2n1z9Sd8VYM,5033
- napistu/ingestion/__init__.py,sha256=1kqmRHlEyI7VpILzurZb1URwC_UIc1PTMEBHQnjXW6s,246
- napistu/ingestion/bigg.py,sha256=XPJZv64mrIMCuKe1mjQfS5QPR9tmengGvndSjc3QFLA,5559
- napistu/ingestion/constants.py,sha256=TYATiVNrLyuQ1AvLVt35F1xQ8pQ3U19o_N6ZSkdW3PA,9941
- napistu/ingestion/cpr_edgelist.py,sha256=eVT9M7gmdBuGHcAYlvkD_zzvTtyzXufKWjwDiT8OxF4,3572
- napistu/ingestion/identifiers_etl.py,sha256=6ppDUA6lEZurdmVbiFLOUzphYbr-hndMhtqsQnq_yAc,5009
- napistu/ingestion/obo.py,sha256=pszLLfImZxDYjL3WQUCow2hQFURROGHqIq3qbgVtzAM,8836
- napistu/ingestion/psi_mi.py,sha256=Icj73EK75ytFPBw-TH2B6yW1ZWAmckmn5mtPl9pIxuA,9389
- napistu/ingestion/reactome.py,sha256=-Q3GsAsfVkTD7cDD1fLEEnWQbI6vs7nxsdYInk7ZvVE,7907
- napistu/ingestion/sbml.py,sha256=gK6_jHgo6oaiG16WlrbBSvxq_0VzFR4a5fG9IQrp5bU,24153
- napistu/ingestion/string.py,sha256=tsaHrjppgFbl9NnRcB2DytpoontqrpfQL65zD9HPgEM,11668
- napistu/ingestion/trrust.py,sha256=ccjZc_eF3PdxxurnukiEo_e0-aKc_3z22NYbaJBtHdY,9774
- napistu/ingestion/yeast.py,sha256=bwFBNxRq-dLDaddgBL1hpfZj0eQ56nBXyR_9n0NZT9Y,5233
- napistu/modify/__init__.py,sha256=1kqmRHlEyI7VpILzurZb1URwC_UIc1PTMEBHQnjXW6s,246
- napistu/modify/constants.py,sha256=KHigix_8A8kCLWYVGR9_6_n34UNDcq2guDLC1KLeNZ4,2648
- napistu/modify/curation.py,sha256=UNeAfJ26XDFvSwkPL8WHCAP0FQYiVUrSvJn3UIt5jy8,21607
- napistu/modify/gaps.py,sha256=XqwfvzgJywA7ws5hzDlj22xs5tRGc4xOdbQ2v51UJqc,23983
- napistu/modify/pathwayannot.py,sha256=onbQy9YNYPbeOih8fSxymxUQJc1jXjRIQOABv3xkvng,47183
- napistu/modify/uncompartmentalize.py,sha256=U5X4Q7Z-YIkC8_711x3sU21vTVdv9rKfauwz4JNzl6c,9690
- napistu/network/__init__.py,sha256=1kqmRHlEyI7VpILzurZb1URwC_UIc1PTMEBHQnjXW6s,246
- napistu/network/constants.py,sha256=jz8vRjgns74piUcvmoIP_f-8s9w15SxWAEw2lf6XmDY,3661
- napistu/network/neighborhoods.py,sha256=TopPpcUD09bAfJuT_L4dkHwJhV1VJJlzXpyzldYi85A,55512
- napistu/network/net_create.py,sha256=9Rb5I6uLlL50SNADucsT_90F9k7rzmp2EQfEgNlr37E,60343
- napistu/network/net_utils.py,sha256=cMWLOHlz4XvPA8PlPiSFYNDjAEv4t1qlZxwabBaZrK8,21188
- napistu/network/paths.py,sha256=a2J3JWIdMufdNs8Amh6I7s3TOVD2EzLV9khqbWHvGlA,16652
- napistu/network/precompute.py,sha256=83Vr2pxCmEtJJmE_Lq1BI-pEmESDNG0N7vByXjBf_oQ,7517
- napistu/rpy2/__init__.py,sha256=B9tZHiEp6bvysjqvBRQ1aGY493Ks9kouwb0pW7KsKqA,4100
- napistu/rpy2/callr.py,sha256=76ICWj7Jso-qrYLNfiV-DgPyrMTdRXz_EhyGOD9CbFM,4301
- napistu/rpy2/constants.py,sha256=JpJqsxImZis8fFFfePXYdbkhUZhXDZoHSHVf92w1h8U,2619
- napistu/rpy2/netcontextr.py,sha256=gkpBgrASNeH_8IjFyY-Tj-S87HjNOkGdfMta0WRdEnU,16278
- napistu/rpy2/rids.py,sha256=sGMTRuOQRDpHBHZwfTS7uKUW9TBI_yMpht6SFhup8vw,23937
- napistu-0.1.0.dist-info/licenses/LICENSE,sha256=kW8wVT__JWoHjl2BbbJDAZInWa9AxzJeR_uv6-i5x1g,1063
- tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- tests/conftest.py,sha256=uDuqgZKR37csoUI4t4U-pzEoANMFAIyTA4k8XLRu_Bw,2572
- tests/test_consensus.py,sha256=u_5Hyocz2peNbJMd5ydgoAwQ4il0lFm-PrzrckTCitI,9403
- tests/test_constants.py,sha256=gJLDv7QMeeBiiupyMazj6mumk20KWvGMgm2myHMKKfc,531
- tests/test_curation.py,sha256=-Q2J0D7qs9PGjHZX-rM4NxRLLdwxoapytSo_98q9ItY,3864
- tests/test_edgelist.py,sha256=bdEtQJdd4MeQsNtng9afHYNVDsEy0U07sfVwguAdIBM,560
- tests/test_gcs.py,sha256=sq-zIDfmLIpZ5oFKCmyuaw9tfSzAY5hSnpuN-xqiqpk,561
- tests/test_identifiers.py,sha256=RyuPAMhYI8cDOl2r62idweLxgy7rAs9omeZQ62h56kY,5019
- tests/test_igraph.py,sha256=HoYeFAAFXWtxdonnUTpV59-jCLicNa0_utPaaGKXMAw,10926
- tests/test_indices.py,sha256=-TrKfX4qXsofg_TPQEhHaQc_CuQMEd4_0maJgGCgSfE,2468
- tests/test_mechanism_matching.py,sha256=gD_n2saM7yYa56QU0RMAYMKMAk7oF8ESbM7GHbI6bFY,4156
- tests/test_net_utils.py,sha256=4HqfFF6yycAz7oQYRz6MefzQVQ_ZjWpeUEA4lUDOMJc,1614
- tests/test_netcontextr.py,sha256=PKH0D-8EL0HNrCMtF-fAaYv5Lao4mwVPDZLQ5LHJXqc,3399
- tests/test_obo.py,sha256=47qNCElPzu2nA36Oq83Dqp1RGhITqztjl7UyZ5cMsj4,959
- tests/test_pathwayannot.py,sha256=bceosccNy9tgxQei_7j7ATBSSvBSxOngJvK-mAzR_K0,3312
- tests/test_precomputed_distances.py,sha256=ht7lVz0wGOOQl9UTI1o9ftm0Dk7q8E40UV2jxVmE-Tg,7203
- tests/test_rpy2.py,sha256=beihvGlWsQA9U7V3tfqBIOUL-S8m8Nj84Bg2Wt2sNH8,1491
- tests/test_sbml.py,sha256=w_VU06psAP0Ku3B0flbP4hKhBfx2ZWV3oOdUgWzrMP4,1276
- tests/test_sbml_dfs_create.py,sha256=w29mUcnC6g9Yqp8Q3b-oRQc5GiDvzjS5_GOE_LjwGZo,9982
- tests/test_sbml_dfs_utils.py,sha256=onFWdhrTix30XR1-CMrMXld37BYxEGi6TZrweugLDzI,505
- tests/test_sbo.py,sha256=x_PENFaXYsrZIzOZu9cj_Wrej7i7SNGxgBYYvcigLs0,308
- tests/test_set_coverage.py,sha256=gM6Zl3MhVRHUi0_z0ISqpeXckWT8XdpXb58ipCoWyHU,1606
- tests/test_source.py,sha256=hT0IlpexR5zP0OhWl5BBaho9d1aCYQlFZLwRIRRnw_Y,1969
- tests/test_uncompartmentalize.py,sha256=nAk5kfAVLU9a2VWe2x2HYVcKqj-EnwmwddERIPRax8c,1289
- tests/test_utils.py,sha256=knOWMN9xgaNLDj_4T_ZI3f22p1ZqovRLVDBFaMhOnFs,14845
- tests/utils.py,sha256=SoWQ_5roJteFGcMaOeEiQ5ucwq3Z2Fa3AAs9iXHTsJY,749
- tests/test_data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- napistu-0.1.0.dist-info/METADATA,sha256=bFJYn_d8Q0WfF5fpnE5tRZAqU3jNHuxXV0xyI-bt0yk,1830
- napistu-0.1.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- napistu-0.1.0.dist-info/entry_points.txt,sha256=_QnaPOvJNA3IltxmZgWIiBoen-L1bPYX18YQfC7oJgQ,41
- napistu-0.1.0.dist-info/top_level.txt,sha256=Gpvk0a_PjrtqhYcQ9IDr3zR5LqpZ-uIHidQMIpjlvhY,14
- napistu-0.1.0.dist-info/RECORD,,