dkist-processing-core 7.2.1rc1__tar.gz → 7.2.2rc2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59) hide show
  1. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/CHANGELOG.rst +10 -0
  2. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/PKG-INFO +3 -1
  3. dkist_processing_core-7.2.2rc2/changelog/72.misc.rst +1 -0
  4. dkist_processing_core-7.2.2rc2/changelog/76.feature.1.rst +1 -0
  5. dkist_processing_core-7.2.2rc2/changelog/76.feature.2.rst +1 -0
  6. dkist_processing_core-7.2.2rc2/changelog/77.misc.rst +1 -0
  7. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/build_utils.py +2 -4
  8. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/config.py +5 -0
  9. dkist_processing_core-7.2.2rc2/dkist_processing_core/notebook_utils.py +70 -0
  10. dkist_processing_core-7.2.2rc2/dkist_processing_core/tests/test_notebook_utils.py +99 -0
  11. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/test_workflow.py +3 -2
  12. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/workflow.py +33 -30
  13. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core.egg-info/PKG-INFO +3 -1
  14. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core.egg-info/SOURCES.txt +6 -1
  15. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core.egg-info/requires.txt +2 -0
  16. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/pyproject.toml +6 -0
  17. dkist_processing_core-7.2.1rc1/changelog/75.misc.rst +0 -1
  18. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/.gitignore +0 -0
  19. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/.pre-commit-config.yaml +0 -0
  20. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/.readthedocs.yml +0 -0
  21. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/.snyk +0 -0
  22. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/README.rst +0 -0
  23. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/bitbucket-pipelines.yml +0 -0
  24. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/changelog/.gitempty +0 -0
  25. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/__init__.py +0 -0
  26. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/failure_callback.py +0 -0
  27. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/node.py +0 -0
  28. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/resource_queue.py +0 -0
  29. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/task.py +0 -0
  30. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/__init__.py +0 -0
  31. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/conftest.py +0 -0
  32. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/invalid_workflow_cyclic/__init__.py +0 -0
  33. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/invalid_workflow_cyclic/workflow.py +0 -0
  34. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/invalid_workflow_for_docker_multi_category/__init__.py +0 -0
  35. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/invalid_workflow_for_docker_multi_category/workflow.py +0 -0
  36. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/task_example.py +0 -0
  37. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/test_build_utils.py +0 -0
  38. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/test_export.py +0 -0
  39. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/test_failure_callback.py +0 -0
  40. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/test_node.py +0 -0
  41. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/test_task.py +0 -0
  42. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/valid_workflow_package/__init__.py +0 -0
  43. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/valid_workflow_package/workflow.py +0 -0
  44. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/zero_node_workflow_package/__init__.py +0 -0
  45. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core/tests/zero_node_workflow_package/workflow.py +0 -0
  46. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core.egg-info/dependency_links.txt +0 -0
  47. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/dkist_processing_core.egg-info/top_level.txt +0 -0
  48. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/docs/Makefile +0 -0
  49. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/docs/auto-proc-concept-model.png +0 -0
  50. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/docs/auto_proc_brick.png +0 -0
  51. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/docs/automated-processing-deployed.png +0 -0
  52. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/docs/changelog.rst +0 -0
  53. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/docs/conf.py +0 -0
  54. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/docs/index.rst +0 -0
  55. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/docs/landing_page.rst +0 -0
  56. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/docs/make.bat +0 -0
  57. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/docs/requirements.txt +0 -0
  58. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/licenses/LICENSE.rst +0 -0
  59. {dkist_processing_core-7.2.1rc1 → dkist_processing_core-7.2.2rc2}/setup.cfg +0 -0
@@ -1,3 +1,13 @@
1
+ v7.2.1 (2026-04-16)
2
+ ===================
3
+
4
+ Misc
5
+ ----
6
+
7
+ - Configure pyproject.toml to turn warnings raised during testing into errors. (`#74 <https://bitbucket.org/dkistdc/dkist-processing-core/pull-requests/74>`__)
8
+ - Upgrade Read the Docs LTS build image to Ubuntu 24.04. (`#75 <https://bitbucket.org/dkistdc/dkist-processing-core/pull-requests/75>`__)
9
+
10
+
1
11
  v7.1.0 (2026-04-01)
2
12
  ===================
3
13
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dkist-processing-core
3
- Version: 7.2.1rc1
3
+ Version: 7.2.2rc2
4
4
  Summary: Abstraction layer used by the DKIST science data processing pipelines with Apache Airflow
5
5
  Author-email: NSO / AURA <dkistdc@nso.edu>
6
6
  License: BSD-3-Clause
@@ -18,6 +18,8 @@ Requires-Dist: requests>=2.23
18
18
  Requires-Dist: talus<2.0,>=1.3.4
19
19
  Requires-Dist: pendulum
20
20
  Requires-Dist: nbformat>=5.9.2
21
+ Requires-Dist: notebook<8.0,>=7.5.5
22
+ Requires-Dist: nbconvert<8.0,>=7.16.6
21
23
  Requires-Dist: dkist-service-configuration<5.0,>=4.3.0
22
24
  Requires-Dist: pydantic>2.0
23
25
  Provides-Extra: test
@@ -0,0 +1 @@
1
+ Add an environment variable to indicate when the execution environment is a Jupyter notebook.
@@ -0,0 +1 @@
1
+ Add utility for working with the notebook build target which supports waiting for a notebook to autosave.
@@ -0,0 +1 @@
1
+ Add utility for working with the notebook build target which supports exporting a notebook as markdown.
@@ -0,0 +1 @@
1
+ Refactor the dag naming function to be accessible outside of the context of a Workflow instance.
@@ -115,13 +115,11 @@ class NotebookDockerfile:
115
115
  def setup(self) -> list[str]:
116
116
  """Environment setup lines."""
117
117
  return [
118
+ "ENV IS_NOTEBOOK_EXECUTION_ENVIRONMENT=true",
118
119
  "COPY . /app",
119
120
  "WORKDIR /app",
120
121
  "RUN python -m pip install -U pip",
121
- "RUN pip install notebook",
122
- "RUN pip freeze | grep notebook= > constraints.txt",
123
- "RUN cat constraints.txt",
124
- "RUN python -m pip install -c constraints.txt .",
122
+ "RUN python -m pip install .",
125
123
  ]
126
124
 
127
125
  @property
@@ -27,6 +27,11 @@ class DKISTProcessingCoreConfiguration(InstrumentedMeshServiceConfigurationBase)
27
27
  )
28
28
  pip_timeout: int = Field(default=15, description="Timeout for pip installs in seconds.")
29
29
  pip_retries: int = Field(default=5, description="Number of retries for pip installs.")
30
+ is_notebook_execution_environment: bool = Field(
31
+ default=False,
32
+ description="Indication of whether the code is running in a notebook execution environment.",
33
+ examples=[True, False],
34
+ )
30
35
 
31
36
  @property
32
37
  def isb_mesh_service(self) -> MeshService:
@@ -0,0 +1,70 @@
1
+ """Utilities for working with the notebook execution environment."""
2
+
3
+ import time
4
+ from pathlib import Path
5
+ from uuid import uuid4
6
+
7
+ import nbformat
8
+ from nbconvert import MarkdownExporter
9
+
10
+
11
+ def wait_for_notebook_save(
12
+ notebook_path: Path,
13
+ sentinel: str | None = None,
14
+ attempts: int = 120,
15
+ delay_s: int = 1,
16
+ ) -> None:
17
+ """
18
+ Wait until the notebook on disk contains the sentinel output.
19
+
20
+ Parameters
21
+ ----------
22
+ notebook_path
23
+ The path to the notebook file to check for the sentinel output.
24
+
25
+ sentinel
26
+ The string to search for in the notebook outputs to confirm that the notebook has been saved with the expected output.
27
+
28
+ attempts
29
+ The number of times to check the notebook for the sentinel output before giving up and raising a
30
+ TimeoutError.
31
+
32
+ delay_s
33
+ The number of seconds to wait between attempts to check the notebook for the sentinel output.
34
+
35
+ Returns
36
+ -------
37
+ None if the sentinel is found within the notebook outputs, otherwise raises a TimeoutError after the specified number of attempts.
38
+ """
39
+ sentinel = sentinel or uuid4().hex
40
+
41
+ print(f"Waiting for notebook to save with sentinel '{sentinel}'...", flush=True)
42
+
43
+ for _ in range(attempts):
44
+ text = notebook_path.read_text(encoding="utf-8")
45
+ if sentinel in text:
46
+ return
47
+ time.sleep(delay_s)
48
+
49
+ raise TimeoutError("Notebook did not save updated outputs in time")
50
+
51
+
52
+ def export_notebook_by_path(notebook_path: Path) -> bytes:
53
+ """
54
+ Export the notebook at the given path to markdown format and return the resulting bytes.
55
+
56
+ Parameters
57
+ ----------
58
+ notebook_path
59
+ The path to the notebook file to export.
60
+
61
+ Returns
62
+ -------
63
+ The exported notebook in markdown format as bytes.
64
+ """
65
+ notebook_contents = notebook_path.read_text(encoding="utf-8")
66
+ notebook = nbformat.reads(notebook_contents, as_version=nbformat.NO_CONVERT)
67
+
68
+ exporter = MarkdownExporter()
69
+ body, _ = exporter.from_notebook_node(notebook)
70
+ return body.encode("utf-8")
@@ -0,0 +1,99 @@
1
+ """Tests for the notebook_utils module"""
2
+
3
+ from pathlib import Path
4
+ from uuid import uuid4
5
+
6
+ import nbformat
7
+ import pytest
8
+ from nbformat.v4 import new_code_cell
9
+ from nbformat.v4 import new_markdown_cell
10
+ from nbformat.v4 import new_notebook
11
+ from nbformat.v4 import new_output
12
+
13
+ from dkist_processing_core.notebook_utils import export_notebook_by_path
14
+ from dkist_processing_core.notebook_utils import wait_for_notebook_save
15
+
16
+
17
+ @pytest.fixture
18
+ def notebook_path_factory(tmp_path: Path):
19
+ def _make_notebook(*, cells=None) -> Path:
20
+ notebook_path = tmp_path / "test.ipynb"
21
+ cells = cells or []
22
+ nb = new_notebook(cells=cells)
23
+ with notebook_path.open("w", encoding="utf-8") as f:
24
+ nbformat.write(nb, f)
25
+ return notebook_path
26
+
27
+ return _make_notebook
28
+
29
+
30
+ @pytest.fixture
31
+ def notebook_sentinel() -> str:
32
+ return uuid4().hex
33
+
34
+
35
+ @pytest.fixture
36
+ def notebook_with_sentinel_output(notebook_path_factory, notebook_sentinel) -> Path:
37
+ return notebook_path_factory(
38
+ cells=[
39
+ new_markdown_cell("# Title"),
40
+ new_code_cell(
41
+ source="print('ready')",
42
+ outputs=[
43
+ new_output(
44
+ output_type="stream",
45
+ name="stdout",
46
+ text=f"before {notebook_sentinel} after",
47
+ )
48
+ ],
49
+ ),
50
+ ]
51
+ )
52
+
53
+
54
+ @pytest.fixture
55
+ def notebook_without_sentinel_output(notebook_path_factory) -> Path:
56
+ return notebook_path_factory(
57
+ cells=[
58
+ new_code_cell(
59
+ source="print('not ready')",
60
+ outputs=[
61
+ new_output(
62
+ output_type="stream",
63
+ name="stdout",
64
+ text="some other output",
65
+ )
66
+ ],
67
+ )
68
+ ]
69
+ )
70
+
71
+
72
+ def test_wait_for_notebook_save_succeeds_when_sentinel_in_code_cell_output(
73
+ notebook_with_sentinel_output, notebook_sentinel
74
+ ):
75
+ # no error raised
76
+ wait_for_notebook_save(
77
+ notebook_with_sentinel_output, sentinel=notebook_sentinel, attempts=1, delay_s=0
78
+ )
79
+
80
+
81
+ def test_wait_for_notebook_save_times_out_when_sentinel_missing(
82
+ notebook_without_sentinel_output,
83
+ ):
84
+ with pytest.raises(TimeoutError, match="Notebook did not save updated outputs in time"):
85
+ wait_for_notebook_save(notebook_without_sentinel_output, attempts=1, delay_s=0)
86
+
87
+
88
+ def test_export_notebook_by_path_returns_markdown_bytes(
89
+ notebook_with_sentinel_output, notebook_sentinel
90
+ ) -> None:
91
+ exported = export_notebook_by_path(notebook_with_sentinel_output)
92
+
93
+ assert isinstance(exported, bytes)
94
+
95
+ output = exported.decode("utf-8")
96
+
97
+ assert "# Title" in output # markdown
98
+ assert "print('ready')" in output # code
99
+ assert notebook_sentinel in output # output
@@ -9,6 +9,7 @@ from airflow import DAG
9
9
  from dkist_processing_core import ResourceQueue
10
10
  from dkist_processing_core import Workflow
11
11
  from dkist_processing_core.workflow import MAXIMUM_ALLOWED_WORKFLOW_NAME_LENGTH
12
+ from dkist_processing_core.workflow import _check_dag_name_characters
12
13
  from dkist_processing_core.workflow import workflow_name_from_details
13
14
 
14
15
 
@@ -165,9 +166,9 @@ def test_check_dag_name_characters():
165
166
  When: checking if it is a valid airflow name or not
166
167
  Then: correctly identify valid and invalid names
167
168
  """
168
- Workflow.check_dag_name_characters(dag_name="This_dag_name_is_valid")
169
+ _check_dag_name_characters(dag_name="This_dag_name_is_valid")
169
170
  with pytest.raises(ValueError):
170
- Workflow.check_dag_name_characters(dag_name="Invalid*dag*name")
171
+ _check_dag_name_characters(dag_name="Invalid*dag*name")
171
172
 
172
173
 
173
174
  @pytest.mark.parametrize(
@@ -13,7 +13,7 @@ from dkist_processing_core.node import task_type_hint
13
13
  from dkist_processing_core.node import upstreams_type_hint
14
14
  from dkist_processing_core.resource_queue import ResourceQueue
15
15
 
16
- __all__ = ["Workflow", "workflow_name_from_details"]
16
+ __all__ = ["Workflow", "workflow_name_from_details", "dag_name"]
17
17
 
18
18
  MAXIMUM_ALLOWED_WORKFLOW_NAME_LENGTH = 100
19
19
 
@@ -34,6 +34,35 @@ def workflow_name_from_details(
34
34
  return workflow_name
35
35
 
36
36
 
37
+ def _check_dag_name_characters(dag_name: str):
38
+ """
39
+ Figure out if the dag name is an Airflow-allowed name.
40
+
41
+ Can only contain
42
+ * ascii letters
43
+ * numbers
44
+ * dash (-)
45
+ * dot (.)
46
+ * underscore (_)
47
+
48
+ Raise error if non-allowed characters are found.
49
+ """
50
+ allowed_chars = [c for c in string.ascii_letters] + ["-", ".", "_"] + [n for n in string.digits]
51
+ if not all([char in allowed_chars for char in dag_name]):
52
+ raise ValueError(
53
+ f"Dag name {dag_name} contains invalid characters. "
54
+ f"Only ascii letters and the dash, dot, and "
55
+ f"underscore symbols are permitted."
56
+ )
57
+
58
+
59
+ def dag_name(workflow_name: str, workflow_version: str) -> str:
60
+ """Return the dag name created from its constituent parts."""
61
+ result = f"{workflow_name}_{workflow_version}"
62
+ _check_dag_name_characters(result)  # raise an error if invalid
63
+ return result
64
+
65
+
37
66
  class Workflow:
38
67
  """
39
68
  Abstraction to create a workflow in 1 or more target execution environment.
@@ -116,33 +145,7 @@ class Workflow:
116
145
  @property
117
146
  def dag_name(self) -> str:
118
147
  """Return the dag name created from its constituent parts."""
119
- result = f"{self.workflow_name}_{self.workflow_version}"
120
- self.check_dag_name_characters(result) # raise an error if in valid
121
- return result
122
-
123
- @staticmethod
124
- def check_dag_name_characters(dag_name: str):
125
- """
126
- Figure out if the dag name is an Airflow-allowed name.
127
-
128
- Can only contain
129
- * ascii letters
130
- * numbers
131
- * dash (-)
132
- * dot (.)
133
- * underscore (_)
134
-
135
- Raise error if non-allowed characters are found.
136
- """
137
- allowed_chars = (
138
- [c for c in string.ascii_letters] + ["-", ".", "_"] + [n for n in string.digits]
139
- )
140
- if not all([char in allowed_chars for char in dag_name]):
141
- raise ValueError(
142
- f"Dag name {dag_name} contains invalid characters. "
143
- f"Only ascii letters and the dash, dot, and "
144
- f"underscore symbols are permitted."
145
- )
148
+ return dag_name(workflow_name=self.workflow_name, workflow_version=self.workflow_version)
146
149
 
147
150
  @property
148
151
  def dag_tags(self) -> str:
@@ -223,7 +226,7 @@ class Workflow:
223
226
  f.write("\n")
224
227
  return workflow_py
225
228
 
226
- def export_notebook(self, path: str | Path | None = None):
229
+ def export_notebook(self, path: str | Path | None = None) -> Path:
227
230
  """Render the workflow as a jupyter notebook."""
228
231
  path = path or "notebooks/"
229
232
  path = Path(path)
@@ -238,7 +241,7 @@ class Workflow:
238
241
  nbf.write(nb, f)
239
242
  return notebook_ipynb
240
243
 
241
- def topological_sort(self) -> [Node]:
244
+ def topological_sort(self) -> list[Node]:
242
245
  """Use a topological sort to find a valid linear order for task execution."""
243
246
  node_task_names = {node.task.__name__: node for node in self.nodes}
244
247
  node_upstream_tasks = {node.task: node.upstreams for node in self.nodes}
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dkist-processing-core
3
- Version: 7.2.1rc1
3
+ Version: 7.2.2rc2
4
4
  Summary: Abstraction layer used by the DKIST science data processing pipelines with Apache Airflow
5
5
  Author-email: NSO / AURA <dkistdc@nso.edu>
6
6
  License: BSD-3-Clause
@@ -18,6 +18,8 @@ Requires-Dist: requests>=2.23
18
18
  Requires-Dist: talus<2.0,>=1.3.4
19
19
  Requires-Dist: pendulum
20
20
  Requires-Dist: nbformat>=5.9.2
21
+ Requires-Dist: notebook<8.0,>=7.5.5
22
+ Requires-Dist: nbconvert<8.0,>=7.16.6
21
23
  Requires-Dist: dkist-service-configuration<5.0,>=4.3.0
22
24
  Requires-Dist: pydantic>2.0
23
25
  Provides-Extra: test
@@ -7,12 +7,16 @@ README.rst
7
7
  bitbucket-pipelines.yml
8
8
  pyproject.toml
9
9
  changelog/.gitempty
10
- changelog/75.misc.rst
10
+ changelog/72.misc.rst
11
+ changelog/76.feature.1.rst
12
+ changelog/76.feature.2.rst
13
+ changelog/77.misc.rst
11
14
  dkist_processing_core/__init__.py
12
15
  dkist_processing_core/build_utils.py
13
16
  dkist_processing_core/config.py
14
17
  dkist_processing_core/failure_callback.py
15
18
  dkist_processing_core/node.py
19
+ dkist_processing_core/notebook_utils.py
16
20
  dkist_processing_core/resource_queue.py
17
21
  dkist_processing_core/task.py
18
22
  dkist_processing_core/workflow.py
@@ -28,6 +32,7 @@ dkist_processing_core/tests/test_build_utils.py
28
32
  dkist_processing_core/tests/test_export.py
29
33
  dkist_processing_core/tests/test_failure_callback.py
30
34
  dkist_processing_core/tests/test_node.py
35
+ dkist_processing_core/tests/test_notebook_utils.py
31
36
  dkist_processing_core/tests/test_task.py
32
37
  dkist_processing_core/tests/test_workflow.py
33
38
  dkist_processing_core/tests/invalid_workflow_cyclic/__init__.py
@@ -3,6 +3,8 @@ requests>=2.23
3
3
  talus<2.0,>=1.3.4
4
4
  pendulum
5
5
  nbformat>=5.9.2
6
+ notebook<8.0,>=7.5.5
7
+ nbconvert<8.0,>=7.16.6
6
8
  dkist-service-configuration<5.0,>=4.3.0
7
9
  pydantic>2.0
8
10
 
@@ -26,6 +26,8 @@ dependencies = [
26
26
  "talus >= 1.3.4, <2.0",
27
27
  "pendulum",
28
28
  "nbformat >= 5.9.2",
29
+ "notebook >= 7.5.5, < 8.0",
30
+ "nbconvert >= 7.16.6, < 8.0",
29
31
  "dkist-service-configuration >=4.3.0, <5.0",
30
32
  "pydantic > 2.0",
31
33
  ]
@@ -63,6 +65,10 @@ markers = [
63
65
  "development: For tests that can only be run while developing with a sidecar proxy (as opposed to in bitbucket pipelines)",
64
66
  "long: For tests that take a long time to run",
65
67
  ]
68
+ filterwarnings = [
69
+ # Turn all warnings into errors so they do not pass silently.
70
+ "error",
71
+ ]
66
72
 
67
73
  [tool.coverage.run]
68
74
  omit = [
@@ -1 +0,0 @@
1
- Upgrade Read the Docs LTS build image to Ubuntu 24.04.