pyobo 0.11.0__py3-none-any.whl → 0.11.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. pyobo/constants.py +1 -0
  2. pyobo/gilda_utils.py +14 -11
  3. pyobo/obographs.py +5 -2
  4. pyobo/resources/so.py +55 -0
  5. pyobo/resources/so.tsv +2604 -0
  6. pyobo/sources/complexportal.py +54 -15
  7. pyobo/sources/dictybase_gene.py +14 -9
  8. pyobo/sources/drugcentral.py +4 -1
  9. pyobo/sources/expasy.py +22 -4
  10. pyobo/sources/flybase.py +3 -2
  11. pyobo/sources/hgnc.py +24 -19
  12. pyobo/sources/hgncgenefamily.py +7 -7
  13. pyobo/sources/kegg/genome.py +18 -6
  14. pyobo/sources/mirbase.py +9 -3
  15. pyobo/sources/npass.py +1 -1
  16. pyobo/sources/pathbank.py +32 -23
  17. pyobo/sources/pombase.py +6 -3
  18. pyobo/sources/reactome.py +28 -7
  19. pyobo/sources/rgd.py +1 -1
  20. pyobo/sources/slm.py +28 -14
  21. pyobo/sources/uniprot/uniprot.py +7 -6
  22. pyobo/sources/zfin.py +18 -6
  23. pyobo/struct/reference.py +9 -8
  24. pyobo/struct/struct.py +30 -20
  25. pyobo/struct/typedef.py +5 -0
  26. pyobo/version.py +1 -1
  27. {pyobo-0.11.0.dist-info → pyobo-0.11.2.dist-info}/METADATA +50 -62
  28. {pyobo-0.11.0.dist-info → pyobo-0.11.2.dist-info}/RECORD +31 -45
  29. {pyobo-0.11.0.dist-info → pyobo-0.11.2.dist-info}/WHEEL +1 -1
  30. pyobo/apps/__init__.py +0 -3
  31. pyobo/apps/cli.py +0 -24
  32. pyobo/apps/gilda/__init__.py +0 -3
  33. pyobo/apps/gilda/__main__.py +0 -8
  34. pyobo/apps/gilda/app.py +0 -48
  35. pyobo/apps/gilda/cli.py +0 -36
  36. pyobo/apps/gilda/templates/base.html +0 -33
  37. pyobo/apps/gilda/templates/home.html +0 -11
  38. pyobo/apps/gilda/templates/matches.html +0 -32
  39. pyobo/apps/mapper/__init__.py +0 -3
  40. pyobo/apps/mapper/__main__.py +0 -11
  41. pyobo/apps/mapper/cli.py +0 -37
  42. pyobo/apps/mapper/mapper.py +0 -187
  43. pyobo/apps/mapper/templates/base.html +0 -35
  44. pyobo/apps/mapper/templates/mapper_home.html +0 -64
  45. pyobo-0.11.0.dist-info/LICENSE +0 -21
  46. {pyobo-0.11.0.dist-info → pyobo-0.11.2.dist-info}/entry_points.txt +0 -0
  47. {pyobo-0.11.0.dist-info → pyobo-0.11.2.dist-info}/top_level.txt +0 -0
pyobo/sources/rgd.py CHANGED
@@ -137,7 +137,7 @@ def get_terms(force: bool = False, version: Optional[str] = None) -> Iterable[Term]:
                 continue
             if prefix == "uniprot":
                 term.append_relationship(
-                    has_gene_product, Reference.auto(prefix=prefix, identifier=xref_id)
+                    has_gene_product, Reference(prefix=prefix, identifier=xref_id)
                 )
             elif prefix == "ensembl":
                 if xref_id.startswith("ENSMUSG") or xref_id.startswith("ENSRNOG"):
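Note (outside the diff): several sources in this release swap `Reference.auto(...)` for the plain `Reference(...)` constructor. As the `pyobo/struct/reference.py` hunks further down show, `Reference.auto` additionally resolves a human-readable name via `get_name`, so the plain constructor simply skips that lookup. A minimal sketch, assuming the import path shown in the `zfin.py` hunk and a placeholder UniProt accession:

```python
from pyobo.struct import Reference

# Plain constructor: the prefix/identifier are normalized, but no name lookup happens.
ref = Reference(prefix="uniprot", identifier="P12345")

# Reference.auto() additionally resolves a display name through pyobo's get_name().
named = Reference.auto(prefix="uniprot", identifier="P12345")
```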
pyobo/sources/slm.py CHANGED
@@ -77,10 +77,10 @@ def iter_terms(version: str, force: bool = False):
         smiles,
         inchi,
         inchikey,
-        chebi_id,
-        lipidmaps_id,
-        hmdb_id,
-        pmids,
+        chebi_ids,
+        lipidmaps_ids,
+        hmdb_ids,
+        pubmed_ids,
     ) in tqdm(
         df[COLUMNS].values, desc=f"[{PREFIX}] generating terms", unit_scale=True, unit="lipid"
     ):
@@ -103,21 +103,35 @@
             inchi = inchi[len("InChI=") :]
             term.append_property(has_inchi, inchi)
         if pd.notna(inchikey):
-            if inchikey.startswith("InChIKey="):
-                inchikey = inchikey[len("InChIKey=") :]
-            term.append_exact_match(Reference(prefix="inchikey", identifier=inchikey))
-        if pd.notna(chebi_id):
-            term.append_exact_match(("chebi", chebi_id))
-        if pd.notna(lipidmaps_id):
+            inchikey = inchikey.removeprefix("InChIKey=").strip()
+            if inchikey and inchikey != "none":
+                try:
+                    inchi_ref = Reference(prefix="inchikey", identifier=inchikey)
+                except ValueError:
+                    tqdm.write(
+                        f"[slm:{identifier}] had invalid inchikey reference: ({type(inchikey)}) {inchikey}"
+                    )
+                else:
+                    term.append_exact_match(inchi_ref)
+        for chebi_id in _split(chebi_ids):
+            term.append_xref(("chebi", chebi_id))
+        for lipidmaps_id in _split(lipidmaps_ids):
             term.append_exact_match(("lipidmaps", lipidmaps_id))
-        if pd.notna(hmdb_id):
+        for hmdb_id in _split(hmdb_ids):
             term.append_exact_match(("hmdb", hmdb_id))
-        if pd.notna(pmids):
-            for pmid in pmids.split("|"):
-                term.append_provenance(("pubmed", pmid))
+        for pubmed_id in _split(pubmed_ids):
+            term.append_provenance(("pubmed", pubmed_id))
         # TODO how to handle class, parents, and components?
         yield term
 
 
+def _split(s: str) -> Iterable[str]:
+    if pd.notna(s):
+        for x in s.split("|"):
+            x = x.strip()
+            if x:
+                yield x
+
+
 if __name__ == "__main__":
     get_obo().write_default(write_obo=True, use_tqdm=True)
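Note (outside the diff): the new `_split` helper replaces the per-column `pd.notna` checks so that every pipe-delimited xref column (ChEBI, LIPID MAPS, HMDB, PubMed) is handled uniformly. A self-contained sketch of the helper as added above, with made-up input values:

```python
from collections.abc import Iterable

import pandas as pd


def _split(s: str) -> Iterable[str]:
    """Yield stripped, non-empty tokens from a pipe-delimited cell; NaN yields nothing."""
    if pd.notna(s):
        for x in s.split("|"):
            x = x.strip()
            if x:
                yield x


print(list(_split("16113 | 30805|")))  # ['16113', '30805']
print(list(_split(float("nan"))))      # []
```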
pyobo/sources/uniprot/uniprot.py CHANGED
@@ -82,7 +82,7 @@ def iter_terms(version: Optional[str] = None) -> Iterable[Term]:
         pubmeds,
         pdbs,
         proteome,
-        gene_id,
+        gene_ids,
         rhea_curies,
         go_components,
         go_functions,
@@ -94,13 +94,14 @@ def iter_terms(version: Optional[str] = None) -> Iterable[Term]:
         description = description.removeprefix("FUNCTION: ")
         term = Term(
             reference=Reference(prefix=PREFIX, identifier=uniprot_id, name=accession),
-            definition=description or None,
+            # definition=description or None,
         )
         term.set_species(taxonomy_id)
-        if gene_id:
-            term.append_relationship(
-                gene_product_of, Reference(prefix="ncbigene", identifier=gene_id)
-            )
+        if gene_ids:
+            for gene_id in gene_ids.split(";"):
+                term.append_relationship(
+                    gene_product_of, Reference(prefix="ncbigene", identifier=gene_id.strip())
+                )
 
         # TODO add type=Reference(prefix="xsd", identifier="boolean")
         term.append_property("reviewed", "true")
pyobo/sources/zfin.py CHANGED
@@ -7,6 +7,7 @@ from typing import Optional
 
 from tqdm.auto import tqdm
 
+from pyobo.resources.so import get_so_name
 from pyobo.struct import (
     Obo,
     Reference,
@@ -113,7 +114,9 @@ def get_terms(force: bool = False, version: Optional[str] = None) -> Iterable[Term]:
     )
     df["sequence_ontology_id"] = df["sequence_ontology_id"].map(lambda x: x[len("SO:") :])
     so = {
-        sequence_ontology_id: Reference.auto(prefix="SO", identifier=sequence_ontology_id)
+        sequence_ontology_id: Reference(
+            prefix="SO", identifier=sequence_ontology_id, name=get_so_name(sequence_ontology_id)
+        )
         for sequence_ontology_id in df["sequence_ontology_id"].unique()
     }
     for _, reference in sorted(so.items()):
@@ -135,17 +138,26 @@
             term.append_alt(alt_id)
         entrez_id = entrez_mappings.get(identifier)
         if entrez_id:
-            term.append_exact_match(Reference(prefix="ncbigene", identifier=entrez_id))
+            try:
+                ncbigene_ref = Reference(prefix="ncbigene", identifier=entrez_id)
+            except ValueError:
+                tqdm.write(f"[zfin] invalid NCBI gene: {entrez_id}")
+            else:
+                term.append_exact_match(ncbigene_ref)
         for uniprot_id in uniprot_mappings.get(identifier, []):
-            term.append_relationship(has_gene_product, Reference.auto("uniprot", uniprot_id))
+            term.append_relationship(
+                has_gene_product, Reference(prefix="uniprot", identifier=uniprot_id)
+            )
         for hgnc_id in human_orthologs.get(identifier, []):
-            term.append_relationship(orthologous, Reference.auto("hgnc", hgnc_id))
+            term.append_relationship(orthologous, Reference(prefix="hgnc", identifier=hgnc_id))
         for mgi_curie in mouse_orthologs.get(identifier, []):
-            mouse_ortholog = Reference.from_curie(mgi_curie, auto=True)
+            mouse_ortholog = Reference.from_curie(mgi_curie)
             if mouse_ortholog:
                 term.append_relationship(orthologous, mouse_ortholog)
         for flybase_id in fly_orthologs.get(identifier, []):
-            term.append_relationship(orthologous, Reference.auto("flybase", flybase_id))
+            term.append_relationship(
+                orthologous, Reference(prefix="flybase", identifier=flybase_id)
+            )
 
         yield term
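Note (outside the diff): the zfin and slm changes above follow the same validate-and-skip pattern, since `Reference(...)` can now raise `ValueError` for a non-standard identifier when identifier checking is enabled (see the `reference.py` hunks below). A minimal sketch with a hypothetical helper name:

```python
from tqdm.auto import tqdm

from pyobo.struct import Reference, Term


def append_exact_match_safely(term: Term, prefix: str, identifier: str) -> None:
    """Hypothetical helper illustrating the log-and-continue pattern used above."""
    try:
        ref = Reference(prefix=prefix, identifier=identifier)
    except ValueError:
        tqdm.write(f"invalid {prefix} identifier: {identifier}")
    else:
        term.append_exact_match(ref)
```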
pyobo/struct/reference.py CHANGED
@@ -8,6 +8,7 @@ from curies.api import ExpansionError
 from pydantic import Field, field_validator, model_validator
 
 from .utils import obo_escape
+from ..constants import GLOBAL_CHECK_IDS
 from ..identifier_utils import normalize_curie
 
 __all__ = [
@@ -45,13 +46,13 @@ class Reference(curies.Reference):
         prefix, identifier = values.get("prefix"), values.get("identifier")
         if not prefix or not identifier:
             return values
-        norm_prefix = bioregistry.normalize_prefix(prefix)
-        if norm_prefix is None:
+        resource = bioregistry.get_resource(prefix)
+        if resource is None:
             raise ExpansionError(f"Unknown prefix: {prefix}")
-        values["prefix"] = norm_prefix
-        values["identifier"] = bioregistry.standardize_identifier(norm_prefix, identifier).strip()
-        # if not bioregistry.is_valid_identifier(norm_prefix, values["identifier"]):
-        #     raise ValueError(f"non-standard identifier: {norm_prefix}:{norm_identifier}")
+        values["prefix"] = resource.prefix
+        values["identifier"] = resource.standardize_identifier(identifier)
+        if GLOBAL_CHECK_IDS and not resource.is_valid_identifier(values["identifier"]):
+            raise ValueError(f"non-standard identifier: {resource.prefix}:{values['identifier']}")
         return values
 
     @classmethod
@@ -60,7 +61,7 @@ class Reference(curies.Reference):
         from ..api import get_name
 
         name = get_name(prefix, identifier)
-        return cls(prefix=prefix, identifier=identifier, name=name)
+        return cls.model_validate({"prefix": prefix, "identifier": identifier, "name": name})
 
     @property
     def bioregistry_link(self) -> str:
@@ -116,7 +117,7 @@ class Reference(curies.Reference):
             return None
         if name is None and auto:
             return cls.auto(prefix=prefix, identifier=identifier)
-        return cls(prefix=prefix, identifier=identifier, name=name)
+        return cls.model_validate({"prefix": prefix, "identifier": identifier, "name": name})
 
     @property
     def _escaped_identifier(self):
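Note (outside the diff): the revised validator delegates to a bioregistry `Resource` object and gates the strict identifier check behind `GLOBAL_CHECK_IDS` (the one-line addition to `pyobo/constants.py` listed at the top of this diff). A rough sketch of the bioregistry calls involved, with `hgnc` / `5` as placeholder inputs:

```python
import bioregistry

resource = bioregistry.get_resource("hgnc")
if resource is None:
    raise ValueError("unknown prefix")

print(resource.prefix)                       # normalized prefix
print(resource.standardize_identifier("5"))  # strips a redundant prefix/banana if present
print(resource.is_valid_identifier("5"))     # regex check; False triggers ValueError when GLOBAL_CHECK_IDS is set
```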
pyobo/struct/struct.py CHANGED
@@ -4,6 +4,7 @@ import gzip
 import json
 import logging
 import os
+import sys
 from collections import defaultdict
 from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence
 from dataclasses import dataclass, field
@@ -603,6 +604,11 @@ class Obo:
 
         return graph_from_obo(self)
 
+    def write_obograph(self, path: Path) -> None:
+        """Write OBO Graph json."""
+        graph = self.get_graph()
+        path.write_text(graph.model_dump_json(indent=2, exclude_none=True, exclude_unset=True))
+
     @classmethod
     def cli(cls) -> None:
         """Run the CLI for this class."""
@@ -616,22 +622,31 @@ class Obo:
         @click.command()
         @verbose_option
        @force_option
+        @click.option("--rewrite", "-r", is_flag=True)
         @click.option("--owl", is_flag=True, help="Write OWL via ROBOT")
-        @click.option("--graph", is_flag=True, help="Write OBO Graph JSON via ROBOT")
         @click.option("--nodes", is_flag=True, help="Write nodes file")
         @click.option(
             "--version", help="Specify data version to get. Use this if bioversions is acting up."
         )
-        def _main(force: bool, owl: bool, graph: bool, nodes: bool, version: Optional[str]):
-            inst = cls(force=force, data_version=version)
-            inst.write_default(
-                write_obograph=graph,
-                write_obo=True,
-                write_owl=owl,
-                write_nodes=nodes,
-                force=force,
-                use_tqdm=True,
-            )
+        def _main(force: bool, owl: bool, nodes: bool, version: Optional[str], rewrite: bool):
+            try:
+                inst = cls(force=force, data_version=version)
+            except Exception as e:
+                click.secho(f"[{cls.ontology}] Got an exception during instantiation - {type(e)}")
+                sys.exit(1)
+
+            try:
+                inst.write_default(
+                    write_obograph=True,
+                    write_obo=True,
+                    write_owl=owl,
+                    write_nodes=nodes,
+                    force=force or rewrite,
+                    use_tqdm=True,
+                )
+            except Exception as e:
+                click.secho(f"[{cls.ontology}] Got an exception during OBO writing {type(e)}")
+                sys.exit(1)
 
         return _main
 
@@ -865,16 +880,11 @@ class Obo:
         relation_df.sort_values(list(relation_df.columns), inplace=True)
         relation_df.to_csv(relations_path, sep="\t", index=False)
 
-        if (write_obo or write_obograph or write_owl) and (not self._obo_path.exists() or force):
+        if (write_obo or write_owl) and (not self._obo_path.exists() or force):
             self.write_obo(self._obo_path, use_tqdm=use_tqdm)
-        if write_obograph:
-            # obo_to_obograph(self._obo_path, self._obograph_path)
-            self._obograph_path.write_text(
-                self.get_graph().json(
-                    indent=2, ensure_ascii=False, exclude_none=True, exclude_unset=True
-                )
-            )
-        if write_owl:
+        if write_obograph and (not self._obograph_path.exists() or force):
+            self.write_obograph(self._obograph_path)
+        if write_owl and (not self._owl_path.exists() or force):
             obo_to_owl(self._obo_path, self._owl_path)
         if write_obonet and (not self._obonet_gz_path.exists() or force):
             logger.debug("writing obonet to %s", self._obonet_gz_path)
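Note (outside the diff): `write_obograph` factors the OBO Graph JSON export out of `write_default`, and the class-generated CLI now writes it by default. A hedged usage sketch, assuming the `slm` source's `get_obo()` entry point shown earlier in this diff:

```python
from pathlib import Path

from pyobo.sources.slm import get_obo

obo = get_obo()
# Serializes get_graph() as OBO Graph JSON via pydantic's model_dump_json().
obo.write_obograph(Path("slm.obograph.json"))
```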
pyobo/struct/typedef.py CHANGED
@@ -365,6 +365,11 @@ has_homepage = TypeDef(
     reference=Reference(prefix="foaf", identifier="homepage", name="homepage"), is_metadata_tag=True
 )
 
+has_category = TypeDef(
+    reference=Reference(prefix="biolink", identifier="category", name="has category"),
+    is_metadata_tag=True,
+)
+
 default_typedefs: dict[tuple[str, str], TypeDef] = {
     v.pair: v for k, v in locals().items() if isinstance(v, TypeDef)
 }
pyobo/version.py CHANGED
@@ -12,7 +12,7 @@ __all__ = [
     "get_git_hash",
 ]
 
-VERSION = "0.11.0"
+VERSION = "0.11.2"
 
 
 def get_git_hash() -> str:
{pyobo-0.11.0.dist-info → pyobo-0.11.2.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pyobo
-Version: 0.11.0
+Version: 0.11.2
 Summary: A python package for handling and generating OBO
 Author-email: Charles Tapley Hoyt <cthoyt@gmail.com>
 Maintainer-email: Charles Tapley Hoyt <cthoyt@gmail.com>
@@ -49,40 +49,40 @@ Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
 Classifier: Topic :: Scientific/Engineering :: Chemistry
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
-License-File: LICENSE
-Requires-Dist: obonet >=0.3.0
+Requires-Dist: obonet>=0.3.0
 Requires-Dist: click
 Requires-Dist: tqdm
 Requires-Dist: pyyaml
 Requires-Dist: pandas
 Requires-Dist: requests
 Requires-Dist: protmapper
-Requires-Dist: more-itertools
-Requires-Dist: more-click >=0.0.2
+Requires-Dist: more_itertools
+Requires-Dist: more_click>=0.0.2
 Requires-Dist: humanize
 Requires-Dist: tabulate
 Requires-Dist: cachier
-Requires-Dist: pystow >=0.2.7
-Requires-Dist: bioversions >=0.5.514
-Requires-Dist: bioregistry >=0.10.20
-Requires-Dist: bioontologies >=0.4.0
-Requires-Dist: zenodo-client >=0.0.5
-Requires-Dist: class-resolver
+Requires-Dist: pystow>=0.2.7
+Requires-Dist: bioversions>=0.5.535
+Requires-Dist: bioregistry>=0.11.23
+Requires-Dist: bioontologies>=0.4.0
+Requires-Dist: zenodo-client>=0.0.5
+Requires-Dist: class_resolver
 Requires-Dist: psycopg2-binary
-Requires-Dist: pydantic >=2.0
-Requires-Dist: drugbank-downloader
-Requires-Dist: chembl-downloader
-Requires-Dist: umls-downloader >=0.1.3
-Requires-Dist: typing-extensions
+Requires-Dist: pydantic>=2.0
+Requires-Dist: requests-ftp
+Requires-Dist: drugbank_downloader
+Requires-Dist: chembl_downloader
+Requires-Dist: umls_downloader>=0.1.3
+Requires-Dist: typing_extensions
 Requires-Dist: rdflib
-Provides-Extra: docs
-Requires-Dist: sphinx >=8 ; extra == 'docs'
-Requires-Dist: sphinx-rtd-theme >=3.0 ; extra == 'docs'
-Requires-Dist: sphinx-click ; extra == 'docs'
-Requires-Dist: sphinx-automodapi ; extra == 'docs'
 Provides-Extra: tests
-Requires-Dist: pytest ; extra == 'tests'
-Requires-Dist: coverage ; extra == 'tests'
+Requires-Dist: pytest; extra == "tests"
+Requires-Dist: coverage; extra == "tests"
+Provides-Extra: docs
+Requires-Dist: sphinx>=8; extra == "docs"
+Requires-Dist: sphinx-rtd-theme>=3.0; extra == "docs"
+Requires-Dist: sphinx-click; extra == "docs"
+Requires-Dist: sphinx_automodapi; extra == "docs"
 
 <!--
 <p align="center">
@@ -109,10 +109,12 @@ Requires-Dist: coverage ; extra == 'tests'
     <img src="https://codecov.io/gh/biopragmatics/pyobo/branch/main/graph/badge.svg" alt="Codecov status" /></a>
     <a href="https://github.com/cthoyt/cookiecutter-python-package">
     <img alt="Cookiecutter template from @cthoyt" src="https://img.shields.io/badge/Cookiecutter-snekpack-blue" /></a>
-    <a href='https://github.com/psf/black'>
-    <img src='https://img.shields.io/badge/code%20style-black-000000.svg' alt='Code style: black' /></a>
+    <a href="https://github.com/astral-sh/ruff">
+    <img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
     <a href="https://github.com/biopragmatics/pyobo/blob/main/.github/CODE_OF_CONDUCT.md">
     <img src="https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg" alt="Contributor Covenant"/></a>
+    <a href="https://doi.org/10.5281/zenodo.3381961">
+    <img src="https://zenodo.org/badge/DOI/10.5281/zenodo.3381961.svg" alt="DOI"></a>
 </p>
 
 Tools for biological identifiers, names, synonyms, xrefs, hierarchies, relations, and properties through the
@@ -500,14 +502,14 @@ and make update to the `url` entry for that namespace in the Bioregistry.
 The most recent release can be installed from
 [PyPI](https://pypi.org/project/pyobo/) with:
 
-```shell
-pip install pyobo
+```console
+python3 -m pip install pyobo
 ```
 
 The most recent code and data can be installed directly from GitHub with:
 
-```shell
-pip install git+https://github.com/biopragmatics/pyobo.git
+```console
+python3 -m pip install git+https://github.com/biopragmatics/pyobo.git
 ```
 
 ## 👐 Contributing
@@ -564,10 +566,10 @@ The final section of the README is for if you want to get involved by making a contribution.
 
 To install in development mode, use the following:
 
-```bash
+```console
 git clone git+https://github.com/biopragmatics/pyobo.git
 cd pyobo
-pip install -e .
+python3 -m pip install -e .
 ```
 
 ### Updating Package Boilerplate
@@ -576,8 +578,8 @@ This project uses `cruft` to keep boilerplate (i.e., configuration, contribution guidelines, documentation
 configuration)
 up-to-date with the upstream cookiecutter package. Update with the following:
 
-```shell
-pip install cruft
+```console
+python3 -m pip install cruft
 cruft update
 ```
 
@@ -586,10 +588,11 @@ available [here](https://github.com/cruft/cruft?tab=readme-ov-file#updating-a-pr
 
 ### 🥼 Testing
 
-After cloning the repository and installing `tox` with `pip install tox tox-uv`,
+After cloning the repository and installing `tox` with
+`python3 -m pip install tox tox-uv`,
 the unit tests in the `tests/` folder can be run reproducibly with:
 
-```shell
+```console
 tox -e py
 ```
 
@@ -600,12 +603,12 @@ Additionally, these tests are automatically re-run with each commit in a
 
 The documentation can be built locally using the following:
 
-```shell
+```console
 git clone git+https://github.com/biopragmatics/pyobo.git
 cd pyobo
 tox -e docs
 open docs/build/html/index.html
-```
+```
 
 The documentation automatically installs the package as well as the `docs`
 extra specified in the [`pyproject.toml`](../../Desktop/pyobo/pyproject.toml). `sphinx` plugins
@@ -661,38 +664,23 @@ You only have to do the following steps once.
 
 #### Configuring your machine's connection to PyPI
 
-You have to do the following steps once per machine. Create a file in your home directory called
-`.pypirc` and include the following:
-
-```ini
-[distutils]
-index-servers =
-    pypi
-    testpypi
-
-[pypi]
-username = __token__
-password = <the API token you just got>
+You have to do the following steps once per machine.
 
-# This block is optional in case you want to be able to make test releases to the Test PyPI server
-[testpypi]
-repository = https://test.pypi.org/legacy/
-username = __token__
-password = <an API token from test PyPI>
+```console
+$ uv tool install keyring
+$ keyring set https://upload.pypi.org/legacy/ __token__
+$ keyring set https://test.pypi.org/legacy/ __token__
 ```
 
-Note that since PyPI is requiring token-based authentication, we use `__token__` as the user, verbatim.
-If you already have a `.pypirc` file with a `[distutils]` section, just make sure that there is an `index-servers`
-key and that `pypi` is in its associated list. More information on configuring the `.pypirc` file can
-be found [here](https://packaging.python.org/en/latest/specifications/pypirc).
+Note that this deprecates previous workflows using `.pypirc`.
 
 #### Uploading to PyPI
 
 After installing the package in development mode and installing
-`tox` with `pip install tox tox-uv`,
-run the following from the shell:
+`tox` with `python3 -m pip install tox tox-uv`,
+run the following from the console:
 
-```shell
+```console
 tox -e finish
 ```
 
@@ -703,7 +691,7 @@ This script does the following:
    and [`docs/source/conf.py`](../../Desktop/pyobo/docs/source/conf.py) to not have the `-dev` suffix
 2. Packages the code in both a tar archive and a wheel using
    [`uv build`](https://docs.astral.sh/uv/guides/publish/#building-your-package)
-3. Uploads to PyPI using [`twine`](https://github.com/pypa/twine).
+3. Uploads to PyPI using [`uv publish`](https://docs.astral.sh/uv/guides/publish/#publishing-your-package).
4. Push to GitHub. You'll need to make a release going with the commit where the version was bumped.
 5. Bump the version to the next patch. If you made big changes and want to bump the version by minor, you can
    use `tox -e bumpversion -- minor` after.