dalia_dif-0.0.15.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dalia_dif-0.0.15/LICENSE +21 -0
- dalia_dif-0.0.15/PKG-INFO +468 -0
- dalia_dif-0.0.15/README.md +420 -0
- dalia_dif-0.0.15/pyproject.toml +290 -0
- dalia_dif-0.0.15/src/dalia_dif/.DS_Store +0 -0
- dalia_dif-0.0.15/src/dalia_dif/__init__.py +1 -0
- dalia_dif-0.0.15/src/dalia_dif/__main__.py +6 -0
- dalia_dif-0.0.15/src/dalia_dif/cli.py +102 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/__init__.py +21 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/community/__init__.py +40 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/community/dalia_communities.csv +76 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/constants.py +152 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/export/__init__.py +1 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/export/charts.py +453 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/export/fti.py +186 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/legacy/__init__.py +8 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/legacy/authors.py +174 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/legacy/components.py +350 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/legacy/learning_resource.py +123 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/model.py +185 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/picklists.py +154 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/predicates.py +56 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/rdf.py +193 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/reader.py +536 -0
- dalia_dif-0.0.15/src/dalia_dif/dif13/utils.py +29 -0
- dalia_dif-0.0.15/src/dalia_dif/namespace/__init__.py +114 -0
- dalia_dif-0.0.15/src/dalia_dif/namespace/bibframe_lite_relation.py +12 -0
- dalia_dif-0.0.15/src/dalia_dif/namespace/bibo.py +16 -0
- dalia_dif-0.0.15/src/dalia_dif/namespace/citedcat.py +12 -0
- dalia_dif-0.0.15/src/dalia_dif/namespace/educor.py +11 -0
- dalia_dif-0.0.15/src/dalia_dif/namespace/fabio.py +14 -0
- dalia_dif-0.0.15/src/dalia_dif/namespace/hcrt.py +20 -0
- dalia_dif-0.0.15/src/dalia_dif/namespace/metadata4ing.py +12 -0
- dalia_dif-0.0.15/src/dalia_dif/namespace/modalia.py +49 -0
- dalia_dif-0.0.15/src/dalia_dif/namespace/rec.py +12 -0
- dalia_dif-0.0.15/src/dalia_dif/py.typed +1 -0
- dalia_dif-0.0.15/src/dalia_dif/utils.py +25 -0
- dalia_dif-0.0.15/src/dalia_dif/version.py +39 -0
dalia_dif-0.0.15/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Charles Tapley Hoyt

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
dalia_dif-0.0.15/PKG-INFO
ADDED
@@ -0,0 +1,468 @@
Metadata-Version: 2.4
Name: dalia_dif
Version: 0.0.15
Summary: Tools for DALIA's data model for open educational resources
Keywords: snekpack,cookiecutter,open educational resources,educational resources,training
Author: Charles Tapley Hoyt
Author-email: Charles Tapley Hoyt <cthoyt@gmail.com>
License-File: LICENSE
Classifier: Development Status :: 1 - Planning
Classifier: Environment :: Console
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Framework :: Pytest
Classifier: Framework :: tox
Classifier: Framework :: Sphinx
Classifier: Natural Language :: English
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Typing :: Typed
Requires-Dist: click
Requires-Dist: pydantic
Requires-Dist: pydantic-extra-types[pycountry]
Requires-Dist: pydantic-metamodel
Requires-Dist: pystow
Requires-Dist: rdflib
Requires-Dist: tqdm
Requires-Dist: curies
Requires-Dist: base32-crockford
Requires-Dist: matplotlib ; extra == 'export'
Requires-Dist: seaborn ; extra == 'export'
Requires-Dist: pandas ; extra == 'export'
Requires-Dist: pandas ; extra == 'fti'
Maintainer: Charles Tapley Hoyt
Maintainer-email: Charles Tapley Hoyt <cthoyt@gmail.com>
Requires-Python: >=3.10
Project-URL: Bug Tracker, https://github.com/data-literacy-alliance/dalia-dif/issues
Project-URL: Documentation, https://dalia-dif.readthedocs.io
Project-URL: Homepage, https://github.com/data-literacy-alliance/dalia-dif
Project-URL: Repository, https://github.com/data-literacy-alliance/dalia-dif.git
Provides-Extra: export
Provides-Extra: fti
Description-Content-Type: text/markdown

<!--
<p align="center">
  <img src="https://github.com/data-literacy-alliance/dalia-dif/raw/main/docs/source/logo.png" height="150">
</p>
-->

<h1 align="center">
  DALIA Interaction Format (DIF)
</h1>

<p align="center">
    <a href="https://github.com/data-literacy-alliance/dalia-dif/actions/workflows/tests.yml">
        <img alt="Tests" src="https://github.com/data-literacy-alliance/dalia-dif/actions/workflows/tests.yml/badge.svg" /></a>
    <a href="https://pypi.org/project/dalia_dif">
        <img alt="PyPI" src="https://img.shields.io/pypi/v/dalia_dif" /></a>
    <a href="https://pypi.org/project/dalia_dif">
        <img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/dalia_dif" /></a>
    <a href="https://github.com/data-literacy-alliance/dalia-dif/blob/main/LICENSE">
        <img alt="PyPI - License" src="https://img.shields.io/pypi/l/dalia_dif" /></a>
    <a href='https://dalia_dif.readthedocs.io/en/latest/?badge=latest'>
        <img src='https://readthedocs.org/projects/dalia_dif/badge/?version=latest' alt='Documentation Status' /></a>
    <a href="https://codecov.io/gh/data-literacy-alliance/dalia-dif/branch/main">
        <img src="https://codecov.io/gh/data-literacy-alliance/dalia-dif/branch/main/graph/badge.svg" alt="Codecov status" /></a>
    <a href="https://github.com/cthoyt/cookiecutter-python-package">
        <img alt="Cookiecutter template from @cthoyt" src="https://img.shields.io/badge/Cookiecutter-snekpack-blue" /></a>
    <a href="https://github.com/astral-sh/ruff">
        <img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
    <a href="https://github.com/data-literacy-alliance/dalia-dif/blob/main/.github/CODE_OF_CONDUCT.md">
        <img src="https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg" alt="Contributor Covenant"/></a>
    <a href="https://doi.org/10.5281/zenodo.17532540">
        <img src="https://zenodo.org/badge/DOI/10.5281/zenodo.17532540.svg" alt="DOI"></a>
</p>

The DALIA Interaction Format (DIF) v1.3 is the data model and CSV-based input
format for open educational resources (OERs) in the DALIA OER platform.

This repository contains an implementation of the data model in Pydantic, a
workflow for serializing to RDF based on
[pydantic-metamodel](https://github.com/cthoyt/pydantic-metamodel), a CSV
reader, and a command-line validator.

A tutorial/guide for curating OERs in tabular form (CSV) can be found
[here](docs/curation.md).

```mermaid
graph LR
    er[Educational Resource] -- "supported by (0..*)" --> community[Community]
    er -- "recommended by (0..*)" --> community
    er -- "has author (1..*)" --> author[Author]
    er -- "has discipline (0..*)" --> discipline[Discipline]
    er -- "type (1)" --> lrt[Learning Resource Type]
    er -- "has media type (0..*)" --> mediatype[Media Type]
    er -- "has target group (0..*)" --> tg[Target Group]
    er -- "requires (0..*)" --> pl[Proficiency Level]
```

## 💪 Getting Started

The `dalia_dif` command-line tool can be used from the console to validate
CSV files (both local and remote).

```console
$ dalia_dif validate https://raw.githubusercontent.com/NFDI4BIOIMAGE/training/refs/heads/main/docs/export/DALIA_training_materials.csv
```
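
The same command also works on a local export; the filename below is only a
placeholder for wherever your DIF CSV lives:

```console
$ dalia_dif validate DALIA_training_materials.csv
```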

Serialize to RDF with `dalia_dif convert`. It guesses the output format from
the file extension; right now, `.ttl` and `.jsonl` are supported.

```console
$ dalia_dif convert -o output.ttl https://raw.githubusercontent.com/NFDI4BIOIMAGE/training/refs/heads/main/docs/export/DALIA_training_materials.csv
```

```console
$ dalia_dif convert -o output.jsonl https://raw.githubusercontent.com/NFDI4BIOIMAGE/training/refs/heads/main/docs/export/DALIA_training_materials.csv
```

Using the data model:

```python
from pydantic_metamodel.api import PredicateObject

from dalia_dif.dif13.model import AuthorDIF13, EducationalResourceDIF13, OrganizationDIF13
from dalia_dif.dif13.picklists import (
    MEDIA_TYPES,
    PROFICIENCY_LEVELS,
    RELATED_WORKS_RELATIONS,
    TARGET_GROUPS,
    LEARNING_RESOURCE_TYPES,
)
from dalia_dif.namespace import DALIA_COMMUNITY, HSFS

resource = EducationalResourceDIF13(
    uuid="b37ddf6e-f136-4230-8418-faf18c4c34d2",
    title="Chemotion ELN Instruction Videos",
    description="Chemotion ELN Instruction Videos Chemotion[1] is an open source "
    "system for storing and managing experiments and molecular data in "
    "chemistry and its related sciences.",
    links=["https://doi.org/10.5281/zenodo.7634481"],
    authors=[
        AuthorDIF13(given_name="Fabian", family_name="Fink", orcid="0000-0002-1863-2087"),
        AuthorDIF13(given_name="Salim", family_name="Benjamaa", orcid="0000-0001-6215-6834"),
        AuthorDIF13(given_name="Nicole", family_name="Parks", orcid="0000-0002-6243-2840"),
        AuthorDIF13(
            given_name="Alexander", family_name="Hoffmann", orcid="0000-0002-9647-8839"
        ),
        AuthorDIF13(
            given_name="Sonja", family_name="Herres-Pawlis", orcid="0000-0002-4354-4353"
        ),
    ],
    license="https://creativecommons.org/licenses/by/4.0",
    supporting_communities=[],
    recommending_communities=[
        DALIA_COMMUNITY["bead62a8-c3c2-46d6-9eb1-ffeaba38d5bf"],  # NFDI4Chem
    ],
    disciplines=[
        HSFS["n40"],  # chemistry
    ],
    file_formats=[
        ".mp4",
    ],
    keywords=["research data management", "NFDI", "RDM", "FDM", "NFDI4Chem", "Chemotion"],
    languages=["eng"],
    learning_resource_types=[
        LEARNING_RESOURCE_TYPES["tutorial"],
    ],
    media_types=[
        MEDIA_TYPES["video"],
    ],
    proficiency_levels=[
        PROFICIENCY_LEVELS["novice"],
    ],
    publication_date="2023-02-13",
    target_groups=[
        TARGET_GROUPS["student (ba)"],
    ],
    related_works=[
        PredicateObject(
            predicate=RELATED_WORKS_RELATIONS["isTranslationOf"],
            object="https://id.dalia.education/learning-resource/20be255e-e2da-4f9c-90b3-5573d6a12619",
        )
    ],
    file_size="703.2 MB",
    version=None,
)
turtle_str = resource.model_dump_turtle()
```
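
As a minimal sketch of what you might do with the result (only
`model_dump_turtle()` above is part of this package's API), the Turtle string
can be written to disk or loaded into an `rdflib` graph, since `rdflib` is
already a dependency:

```python
from pathlib import Path

from rdflib import Graph

# Persist the Turtle produced by the example above; the filename is illustrative.
Path("chemotion-eln-videos.ttl").write_text(turtle_str, encoding="utf-8")

# Or load it into an rdflib graph for further processing.
graph = Graph()
graph.parse(data=turtle_str, format="turtle")
print(f"Serialized {len(graph)} triples")
```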

## 🚀 Installation

The most recent release can be installed from
[PyPI](https://pypi.org/project/dalia_dif/) with uv:

```console
$ uv pip install dalia_dif
```

or with pip:

```console
$ python3 -m pip install dalia_dif
```

The most recent code and data can be installed directly from GitHub with uv:

```console
$ uv pip install git+https://github.com/data-literacy-alliance/dalia-dif.git
```

or with pip:

```console
$ python3 -m pip install git+https://github.com/data-literacy-alliance/dalia-dif.git
```

## 👐 Contributing

Contributions, whether filing an issue, making a pull request, or forking, are
appreciated. See
[CONTRIBUTING.md](https://github.com/data-literacy-alliance/dalia-dif/blob/master/.github/CONTRIBUTING.md)
for more information on getting involved.

## 👋 Attribution

### ⚖️ License

The code in this package is licensed under the MIT License.

### 📖 Citation

An abstract describing the DIF has been published in the proceedings of the
2<sup>nd</sup> Conference on Research Data Infrastructure (CoRDI).

```bibtex
@misc{steiner2025,
  author    = {Steiner, Petra C. and Geiger, Jonathan D. and Fuhrmans, Marc and Amer Desouki, Abdelmoneim and Hüppe, Henrika M.},
  title     = {The Revised DALIA Interchange Format - New Picklists for Describing Open Educational Resources},
  month     = aug,
  year      = 2025,
  publisher = {Zenodo},
  doi       = {10.5281/zenodo.16736170},
  url       = {https://doi.org/10.5281/zenodo.16736170},
}
```

### 🎁 Support

This project has been supported by the following organizations (in alphabetical
order):

- [NFDI4Chem](https://www.nfdi4chem.de)
- [NFDI4Culture](https://nfdi4culture.de)
- [NFDI4Ing](https://nfdi4ing.de)

### 💰 Funding

This project has been supported by the following grants:

| Funding Body                                                        | Program | Grant Number |
| ------------------------------------------------------------------- | ------- | ------------ |
| German Federal Ministry of Research, Technology, and Space (BMFTR)   |         | 16DWWQP07    |
| EU Capacity Building and Resilience Facility                         |         | 16DWWQP07    |

### 🍪 Cookiecutter

This package was created with
[@audreyfeldroy](https://github.com/audreyfeldroy)'s
[cookiecutter](https://github.com/cookiecutter/cookiecutter) package using
[@cthoyt](https://github.com/cthoyt)'s
[cookiecutter-snekpack](https://github.com/cthoyt/cookiecutter-snekpack)
template.

## 🛠️ For Developers

<details>
<summary>See developer instructions</summary>

The final section of the README is for those who want to get involved by making
a code contribution.

### Development Installation

To install in development mode, use the following:

```console
$ git clone https://github.com/data-literacy-alliance/dalia-dif.git
$ cd dalia-dif
$ uv pip install -e .
```

Alternatively, install using pip:

```console
$ python3 -m pip install -e .
```

### 🥼 Testing

After cloning the repository and installing `tox` with
`uv tool install tox --with tox-uv` or `python3 -m pip install tox tox-uv`, the
unit tests in the `tests/` folder can be run reproducibly with:

```console
$ tox -e py
```

Additionally, these tests are automatically re-run with each commit in a
[GitHub Action](https://github.com/data-literacy-alliance/dalia-dif/actions?query=workflow%3ATests).

### 📖 Building the Documentation

The documentation can be built locally using the following:

```console
$ git clone https://github.com/data-literacy-alliance/dalia-dif.git
$ cd dalia-dif
$ tox -e docs
$ open docs/build/html/index.html
```

Building the documentation automatically installs the package as well as the
`docs` extra specified in the [`pyproject.toml`](pyproject.toml). `sphinx`
plugins like `texext` can be added there. Additionally, they need to be added
to the `extensions` list in [`docs/source/conf.py`](docs/source/conf.py).
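
As an illustrative sketch (the surrounding entries are assumptions, not the
project's actual configuration), registering a plugin like `texext` in
`docs/source/conf.py` looks roughly like this:

```python
# docs/source/conf.py (sketch): after adding the plugin to the `docs` extra
# in pyproject.toml, register it with Sphinx here.
extensions = [
    "sphinx.ext.autodoc",  # existing entries will differ in the real conf.py
    "texext",
]
```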

The documentation can be deployed to [ReadTheDocs](https://readthedocs.io) using
[this guide](https://docs.readthedocs.io/en/stable/intro/import-guide.html). The
[`.readthedocs.yml`](.readthedocs.yml) YAML file contains all the configuration
you'll need. You can also set up continuous integration on GitHub to check not
only that Sphinx can build the documentation in an isolated environment (i.e.,
with `tox -e docs-test`) but also that
[ReadTheDocs can build it too](https://docs.readthedocs.io/en/stable/pull-requests.html).

</details>

## 🧑‍💻 For Maintainers

<details>
<summary>See maintainer instructions</summary>

### Initial Configuration

#### Configuring ReadTheDocs

[ReadTheDocs](https://readthedocs.org) is an external documentation hosting
service that integrates with GitHub's CI/CD. Do the following for each
repository:

1. Log in to ReadTheDocs with your GitHub account to install the integration at
   https://readthedocs.org/accounts/login/?next=/dashboard/
2. Import your project by navigating to https://readthedocs.org/dashboard/import
   then clicking the plus icon next to your repository
3. You can rename the repository on the next screen using a more stylized name
   (i.e., with spaces and capital letters)
4. Click next, and you're good to go!

#### Configuring Archival on Zenodo

[Zenodo](https://zenodo.org) is a long-term archival system that assigns a DOI
to each release of your package. Do the following for each repository:

1. Log in to Zenodo via GitHub with this link:
   https://zenodo.org/oauth/login/github/?next=%2F. This brings you to a page
   that lists all of your organizations and asks you to approve installing the
   Zenodo app on GitHub. Click "grant" next to any organizations you want to
   enable the integration for, then click the big green "approve" button. This
   step only needs to be done once.
2. Navigate to https://zenodo.org/account/settings/github/, which lists all of
   your GitHub repositories (both in your username and any organizations you
   enabled). Click the on/off toggle for any relevant repositories. When you
   make a new repository, you'll have to come back to this page.

After these steps, you're ready to go! After you make a release on GitHub
(steps for this are below), you can navigate to
https://zenodo.org/account/settings/github/repository/data-literacy-alliance/dalia-dif
to see the DOI for the release and a link to the Zenodo record for it.

#### Registering with the Python Package Index (PyPI)

The [Python Package Index (PyPI)](https://pypi.org) hosts packages so they can
be easily installed with `pip`, `uv`, and equivalent tools.

1. Register for an account [here](https://pypi.org/account/register)
2. Navigate to https://pypi.org/manage/account and make sure you have verified
   your email address. A verification email might not have been sent by default,
   so you might have to click the "options" dropdown next to your address to get
   to the "re-send verification email" button
3. 2-Factor authentication has been required for PyPI since the end of 2023 (see
   this [blog post from PyPI](https://blog.pypi.org/posts/2023-05-25-securing-pypi-with-2fa/)).
   This means you have to first issue account recovery codes, then set up
   2-factor authentication
4. Issue an API token from https://pypi.org/manage/account/token

This only needs to be done once per developer.

#### Configuring your machine's connection to PyPI

This needs to be done once per machine.

```console
$ uv tool install keyring
$ keyring set https://upload.pypi.org/legacy/ __token__
$ keyring set https://test.pypi.org/legacy/ __token__
```

Note that this deprecates previous workflows using `.pypirc`.

### 📦 Making a Release

#### Uploading to PyPI

After installing the package in development mode and installing `tox` with
`uv tool install tox --with tox-uv` or `python3 -m pip install tox tox-uv`, run
the following from the console:

```console
$ tox -e finish
```

This script does the following:

1. Uses [bump-my-version](https://github.com/callowayproject/bump-my-version) to
   switch the version number in the `pyproject.toml`, `CITATION.cff`,
   `src/dalia_dif/version.py`, and [`docs/source/conf.py`](docs/source/conf.py)
   so that it no longer has the `-dev` suffix
2. Packages the code in both a tar archive and a wheel using
   [`uv build`](https://docs.astral.sh/uv/guides/publish/#building-your-package)
3. Uploads to PyPI using
   [`uv publish`](https://docs.astral.sh/uv/guides/publish/#publishing-your-package)
4. Pushes to GitHub. You'll need to make a release for the commit where the
   version was bumped.
5. Bumps the version to the next patch. If you made big changes and want to bump
   the version by minor instead, you can use `tox -e bumpversion -- minor`
   afterwards, as shown below.
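
For example, to follow a finished release with a minor version bump rather than
the default patch bump:

```console
$ tox -e finish
$ tox -e bumpversion -- minor
```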

#### Releasing on GitHub

1. Navigate to https://github.com/data-literacy-alliance/dalia-dif/releases/new
   to draft a new release
2. Click the "Choose a Tag" dropdown and select the tag corresponding to the
   release you just made
3. Click the "Generate Release Notes" button to get a quick outline of recent
   changes. Modify the title and description as you see fit
4. Click the big green "Publish Release" button

This will trigger Zenodo to assign a DOI to your release as well.

### Updating Package Boilerplate

This project uses `cruft` to keep boilerplate (i.e., configuration, contribution
guidelines, documentation configuration) up-to-date with the upstream
cookiecutter package. Install cruft with either `uv tool install cruft` or
`python3 -m pip install cruft`, then run:

```console
$ cruft update
```

More info on Cruft's update command is available
[here](https://github.com/cruft/cruft?tab=readme-ov-file#updating-a-project).

</details>