dkist-processing-core 7.2.2rc1__tar.gz → 7.3.0__tar.gz

This diff shows the changes between package versions as published to their respective public registries. It is provided for informational purposes only.
Files changed (58)
  1. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/CHANGELOG.rst +25 -0
  2. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/PKG-INFO +4 -5
  3. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/README.rst +2 -0
  4. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/build_utils.py +1 -4
  5. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/notebook_utils.py +21 -27
  6. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/test_build_utils.py +1 -1
  7. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/test_notebook_utils.py +16 -7
  8. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/workflow.py +7 -10
  9. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core.egg-info/PKG-INFO +4 -5
  10. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core.egg-info/SOURCES.txt +0 -4
  11. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core.egg-info/requires.txt +1 -4
  12. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/pyproject.toml +1 -4
  13. dkist_processing_core-7.2.2rc1/changelog/72.misc.rst +0 -1
  14. dkist_processing_core-7.2.2rc1/changelog/76.feature.1.rst +0 -1
  15. dkist_processing_core-7.2.2rc1/changelog/76.feature.2.rst +0 -1
  16. dkist_processing_core-7.2.2rc1/changelog/77.misc.rst +0 -1
  17. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/.gitignore +0 -0
  18. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/.pre-commit-config.yaml +0 -0
  19. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/.readthedocs.yml +0 -0
  20. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/.snyk +0 -0
  21. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/bitbucket-pipelines.yml +0 -0
  22. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/changelog/.gitempty +0 -0
  23. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/__init__.py +0 -0
  24. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/config.py +0 -0
  25. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/failure_callback.py +0 -0
  26. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/node.py +0 -0
  27. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/resource_queue.py +0 -0
  28. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/task.py +0 -0
  29. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/__init__.py +0 -0
  30. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/conftest.py +0 -0
  31. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/invalid_workflow_cyclic/__init__.py +0 -0
  32. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/invalid_workflow_cyclic/workflow.py +0 -0
  33. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/invalid_workflow_for_docker_multi_category/__init__.py +0 -0
  34. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/invalid_workflow_for_docker_multi_category/workflow.py +0 -0
  35. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/task_example.py +0 -0
  36. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/test_export.py +0 -0
  37. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/test_failure_callback.py +0 -0
  38. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/test_node.py +0 -0
  39. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/test_task.py +0 -0
  40. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/test_workflow.py +0 -0
  41. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/valid_workflow_package/__init__.py +0 -0
  42. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/valid_workflow_package/workflow.py +0 -0
  43. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/zero_node_workflow_package/__init__.py +0 -0
  44. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/zero_node_workflow_package/workflow.py +0 -0
  45. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core.egg-info/dependency_links.txt +0 -0
  46. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core.egg-info/top_level.txt +0 -0
  47. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/docs/Makefile +0 -0
  48. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/docs/auto-proc-concept-model.png +0 -0
  49. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/docs/auto_proc_brick.png +0 -0
  50. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/docs/automated-processing-deployed.png +0 -0
  51. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/docs/changelog.rst +0 -0
  52. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/docs/conf.py +0 -0
  53. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/docs/index.rst +0 -0
  54. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/docs/landing_page.rst +0 -0
  55. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/docs/make.bat +0 -0
  56. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/docs/requirements.txt +0 -0
  57. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/licenses/LICENSE.rst +0 -0
  58. {dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/setup.cfg +0 -0

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/CHANGELOG.rst
@@ -1,3 +1,28 @@
+ v7.3.0 (2026-05-06)
+ ===================
+
+ Features
+ --------
+
+ - Add utility for working with the notebook build target which supports waiting for a notebook to autosave. (`#76 <https://bitbucket.org/dkistdc/dkist-processing-core/pull-requests/76>`__)
+ - Add utility for working with the notebook build target which supports exporting a notebook as markdown. (`#76 <https://bitbucket.org/dkistdc/dkist-processing-core/pull-requests/76>`__)
+
+
+ Bugfixes
+ --------
+
+ - Prevent DAG version churn caused by dynamic `start_date` values by ensuring DAG metadata is deterministic. (`#78 <https://bitbucket.org/dkistdc/dkist-processing-core/pull-requests/78>`__)
+
+
+ Misc
+ ----
+
+ - Add 'IS_NOTEBOOK_EXECUTION_ENVIRONMENT' as an environment variable to indicate when the execution environment is a Jupyter notebook. (`#72 <https://bitbucket.org/dkistdc/dkist-processing-core/pull-requests/72>`__)
+ - Refactor the dag naming function to be accessible outside of the context of a Workflow instance. (`#77 <https://bitbucket.org/dkistdc/dkist-processing-core/pull-requests/77>`__)
+ - Address deprecated import of airflow.exceptions.DuplicateTaskIdFound using airflow.sdk.exceptions.DuplicateTaskIdFound instead. (`#79 <https://bitbucket.org/dkistdc/dkist-processing-core/pull-requests/79>`__)
+ - Upgrade `apache-airflow` to version 3.2.1. (`#79 <https://bitbucket.org/dkistdc/dkist-processing-core/pull-requests/79>`__)
+
+
  v7.2.1 (2026-04-16)
  ===================

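
The two notebook utilities introduced by #76 appear in full in the notebook_utils.py hunks below. As a quick orientation, here is a minimal sketch of how a build process might combine them; the paths and the sentinel wiring are hypothetical, not taken from this package:

    from pathlib import Path
    from uuid import uuid4

    from dkist_processing_core.notebook_utils import export_notebook_by_path
    from dkist_processing_core.notebook_utils import wait_for_notebook_save

    # Hypothetical notebook rendered by Workflow.export_notebook().
    notebook_path = Path("notebooks/example_workflow.ipynb")

    # The caller generates the sentinel and must arrange for the notebook to
    # emit it; wait_for_notebook_save() polls the file on disk until the
    # autosaved contents contain the token, then returns.
    sentinel = uuid4().hex
    wait_for_notebook_save(notebook_path, sentinel=sentinel, attempts=120, delay_s=1)

    # Once the outputs are on disk, export the notebook to markdown bytes.
    Path("notebooks/example_workflow.md").write_bytes(export_notebook_by_path(notebook_path))
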

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dkist-processing-core
- Version: 7.2.2rc1
+ Version: 7.3.0
  Summary: Abstraction layer used by the DKIST science data processing pipelines with Apache Airflow
  Author-email: NSO / AURA <dkistdc@nso.edu>
  License: BSD-3-Clause
@@ -13,10 +13,9 @@ Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.13
  Requires-Python: >=3.13
  Description-Content-Type: text/x-rst
- Requires-Dist: apache-airflow[celery,postgres]==3.1.8
+ Requires-Dist: apache-airflow[celery,postgres]==3.2.1
  Requires-Dist: requests>=2.23
  Requires-Dist: talus<2.0,>=1.3.4
- Requires-Dist: pendulum
  Requires-Dist: nbformat>=5.9.2
  Requires-Dist: notebook<8.0,>=7.5.5
  Requires-Dist: nbconvert<8.0,>=7.16.6
@@ -28,8 +27,6 @@ Requires-Dist: pytest-cov; extra == "test"
  Requires-Dist: pytest-mock; extra == "test"
  Requires-Dist: jinja2; extra == "test"
  Requires-Dist: towncrier; extra == "test"
- Requires-Dist: nbconvert; extra == "test"
- Requires-Dist: ipython; extra == "test"
  Provides-Extra: docs
  Requires-Dist: sphinx; extra == "docs"
  Requires-Dist: sphinx-astropy; extra == "docs"
@@ -178,6 +175,8 @@ Environment Variables
  - annotation=int required=False default=15 description='Timeout for pip installs in seconds.'
  * - PIP_RETRIES
  - annotation=int required=False default=5 description='Number of retries for pip installs.'
+ * - IS_NOTEBOOK_EXECUTION_ENVIRONMENT
+ - annotation=bool required=False default=False description='Indication of whether the code is running in a notebook execution environment.' examples=[True, False]

  Development
  -----------

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/README.rst
@@ -135,6 +135,8 @@ Environment Variables
  - annotation=int required=False default=15 description='Timeout for pip installs in seconds.'
  * - PIP_RETRIES
  - annotation=int required=False default=5 description='Number of retries for pip installs.'
+ * - IS_NOTEBOOK_EXECUTION_ENVIRONMENT
+ - annotation=bool required=False default=False description='Indication of whether the code is running in a notebook execution environment.' examples=[True, False]

  Development
  -----------
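
Both files above document the new IS_NOTEBOOK_EXECUTION_ENVIRONMENT flag, but this diff does not show how the package consumes it. The snippet below is only a hedged illustration of reading such a boolean environment variable; the parsing helper is hypothetical and not the package's own config machinery:

    import os

    def is_notebook_execution_environment() -> bool:
        # Hypothetical reader for the documented flag (default False).
        raw = os.environ.get("IS_NOTEBOOK_EXECUTION_ENVIRONMENT", "False")
        return raw.strip().lower() in ("1", "true", "yes")

    if is_notebook_execution_environment():
        print("Running inside a Jupyter notebook execution environment")
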

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/build_utils.py
@@ -119,10 +119,7 @@ class NotebookDockerfile:
              "COPY . /app",
              "WORKDIR /app",
              "RUN python -m pip install -U pip",
-             "RUN pip install notebook",
-             "RUN pip freeze | grep notebook= > constraints.txt",
-             "RUN cat constraints.txt",
-             "RUN python -m pip install -c constraints.txt .",
+             "RUN python -m pip install .",
          ]

      @property

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/notebook_utils.py
@@ -2,19 +2,18 @@

  import time
  from pathlib import Path
+ from uuid import uuid4

  import nbformat
  from nbconvert import MarkdownExporter

- DEFAULT_EXPORT_SENTINEL = "MPW_EXPORT_READY"
-

  def wait_for_notebook_save(
-     notebook_path: Path | str,
-     sentinel: str = DEFAULT_EXPORT_SENTINEL,
-     attempts=120,
-     delay_s=1,
- ):
+     notebook_path: Path,
+     sentinel: str | None = None,
+     attempts: int = 120,
+     delay_s: int = 1,
+ ) -> None:
      """
      Wait until the notebook on disk contains the sentinel output.

@@ -37,28 +36,20 @@ def wait_for_notebook_save(
      -------
      None if the sentinel is found within the notebook outputs, otherwise raises a TimeoutError after the specified number of attempts.
      """
+     sentinel = sentinel or uuid4().hex
+
      print(f"Waiting for notebook to save with sentinel '{sentinel}'...", flush=True)
-     for attempt in range(attempts):
-         with open(notebook_path, "r", encoding="utf-8") as f:
-             nb = nbformat.read(f, as_version=nbformat.NO_CONVERT)
-
-         # Search outputs
-         for cell in nb.cells:
-             if cell.cell_type == "code":
-                 for output in cell.get("outputs", []):
-                     text = ""
-
-                     if output.output_type == "stream":
-                         text = output.get("text", "")
-                     elif output.output_type in ("execute_result", "display_data"):
-                         text = str(output.get("data", ""))
-                     if sentinel in text:
-                         return
+
+     for _ in range(attempts):
+         text = notebook_path.read_text(encoding="utf-8")
+         if sentinel in text:
+             return
          time.sleep(delay_s)
+
      raise TimeoutError("Notebook did not save updated outputs in time")


- def export_notebook_by_path(notebook_path) -> bytes:
+ def export_notebook_by_path(notebook_path: Path) -> bytes:
      """
      Export the notebook at the given path to markdown format and return the resulting bytes.

@@ -66,10 +57,13 @@ def export_notebook_by_path(notebook_path) -> bytes:
      ----------
      notebook_path
          The path to the notebook file to export.
+
+     Returns
+     -------
+     The exported notebook in markdown format as bytes.
      """
-     with open(notebook_path, "r", encoding="utf-8") as f:
-         nb = nbformat.read(f, as_version=4)
+     notebook = nbformat.read(notebook_path, as_version=nbformat.NO_CONVERT)

      exporter = MarkdownExporter()
-     body, _ = exporter.from_notebook_node(nb)
+     body, _ = exporter.from_notebook_node(notebook)
      return body.encode("utf-8")
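
Two details of the rewrite are worth noting. The new wait_for_notebook_save() scans the raw .ipynb text instead of parsed cell outputs, and its uuid4().hex fallback produces a token that nothing else emits, so in practice the caller supplies the sentinel (as the updated tests below do). Because the scan is a plain substring search over the file, a sentinel written literally into a source cell would match before the notebook ever runs; here is a sketch of a notebook-side cell that avoids this, under the assumption that the token is agreed with the exporting process out of band:

    # Hypothetical final cell of a rendered notebook.  The token is assembled
    # at run time so its literal never appears in the cell source; the raw-text
    # scan in wait_for_notebook_save() therefore matches only after this cell
    # has executed and Jupyter has autosaved its output to disk.
    token_parts = ("3f9c0d4b", "e3d94b7f")  # shared with the exporting process
    print("".join(token_parts))
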

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/test_build_utils.py
@@ -7,7 +7,7 @@ from shutil import rmtree

  import pytest
  from airflow.exceptions import AirflowException
- from airflow.exceptions import DuplicateTaskIdFound
+ from airflow.sdk.exceptions import DuplicateTaskIdFound

  from dkist_processing_core.build_utils import export_dags
  from dkist_processing_core.build_utils import export_notebook_dockerfile
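
The import move mirrors Airflow's relocation of DuplicateTaskIdFound into the task SDK namespace. For downstream code that must run against both older and newer Airflow releases, a hedged compatibility shim (assuming only the import location changed, which is what this hunk suggests):

    try:
        # Newer Airflow: the exception lives in the task SDK namespace.
        from airflow.sdk.exceptions import DuplicateTaskIdFound
    except ImportError:
        # Older Airflow: original location, deprecated in newer releases.
        from airflow.exceptions import DuplicateTaskIdFound
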

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/tests/test_notebook_utils.py
@@ -1,6 +1,7 @@
  """Tests for the notebook_utils module"""

  from pathlib import Path
+ from uuid import uuid4

  import nbformat
  import pytest
@@ -9,7 +10,6 @@ from nbformat.v4 import new_markdown_cell
  from nbformat.v4 import new_notebook
  from nbformat.v4 import new_output

- from dkist_processing_core.notebook_utils import DEFAULT_EXPORT_SENTINEL
  from dkist_processing_core.notebook_utils import export_notebook_by_path
  from dkist_processing_core.notebook_utils import wait_for_notebook_save

@@ -28,7 +28,12 @@ def notebook_path_factory(tmp_path: Path):


  @pytest.fixture
- def notebook_with_sentinel_output(notebook_path_factory) -> Path:
+ def notebook_sentinel() -> str:
+     return uuid4().hex
+
+
+ @pytest.fixture
+ def notebook_with_sentinel_output(notebook_path_factory, notebook_sentinel) -> Path:
      return notebook_path_factory(
          cells=[
              new_markdown_cell("# Title"),
@@ -38,7 +43,7 @@ def notebook_with_sentinel_output(notebook_path_factory) -> Path:
                      new_output(
                          output_type="stream",
                          name="stdout",
-                         text=f"before {DEFAULT_EXPORT_SENTINEL} after",
+                         text=f"before {notebook_sentinel} after",
                      )
                  ],
              ),
@@ -65,10 +70,12 @@ def notebook_without_sentinel_output(notebook_path_factory) -> Path:


  def test_wait_for_notebook_save_succeeds_when_sentinel_in_code_cell_output(
-     notebook_with_sentinel_output,
+     notebook_with_sentinel_output, notebook_sentinel
  ):
      # no error raised
-     wait_for_notebook_save(notebook_with_sentinel_output, attempts=1, delay_s=0)
+     wait_for_notebook_save(
+         notebook_with_sentinel_output, sentinel=notebook_sentinel, attempts=1, delay_s=0
+     )


  def test_wait_for_notebook_save_times_out_when_sentinel_missing(
@@ -78,7 +85,9 @@ def test_wait_for_notebook_save_times_out_when_sentinel_missing(
          wait_for_notebook_save(notebook_without_sentinel_output, attempts=1, delay_s=0)


- def test_export_notebook_by_path_returns_markdown_bytes(notebook_with_sentinel_output) -> None:
+ def test_export_notebook_by_path_returns_markdown_bytes(
+     notebook_with_sentinel_output, notebook_sentinel
+ ) -> None:
      exported = export_notebook_by_path(notebook_with_sentinel_output)

      assert isinstance(exported, bytes)
@@ -87,4 +96,4 @@ def test_export_notebook_by_path_returns_markdown_bytes(notebook_with_sentinel_o

      assert "# Title" in output  # markdown
      assert "print('ready')" in output  # code
-     assert DEFAULT_EXPORT_SENTINEL in output  # output
+     assert notebook_sentinel in output  # output

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core/workflow.py
@@ -13,7 +13,7 @@ from dkist_processing_core.node import task_type_hint
  from dkist_processing_core.node import upstreams_type_hint
  from dkist_processing_core.resource_queue import ResourceQueue

- __all__ = ["Workflow", "workflow_name_from_details"]
+ __all__ = ["Workflow", "workflow_name_from_details", "dag_name"]

  MAXIMUM_ALLOWED_WORKFLOW_NAME_LENGTH = 100

@@ -47,12 +47,11 @@ def _check_dag_name_characters(dag_name: str):

      Raise error if non-allowed characters are found.
      """
-     allowed_chars = [c for c in string.ascii_letters] + ["-", ".", "_"] + [n for n in string.digits]
+     allowed_special_characters = "-._"
+     allowed_chars = string.ascii_letters + string.digits + allowed_special_characters
      if not all([char in allowed_chars for char in dag_name]):
          raise ValueError(
-             f"Dag name {dag_name} contains invalid characters. "
-             f"Only ascii letters and the dash, dot, and "
-             f"underscore symbols are permitted."
+             f"Dag name {dag_name} contains invalid characters. Only {allowed_chars} are permitted."
          )


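
The rewritten check accepts exactly ASCII letters, digits, and the characters -, ., and _. A small self-contained illustration of the same membership test (mirroring the hunk above, not imported from the package):

    import string

    allowed_chars = string.ascii_letters + string.digits + "-._"

    def check_dag_name(name: str) -> None:
        # Per-character membership test, as in _check_dag_name_characters.
        if not all(char in allowed_chars for char in name):
            raise ValueError(f"Dag name {name} contains invalid characters.")

    check_dag_name("instrument_pipeline-1.0.1")  # passes silently
    check_dag_name("instrument pipeline")        # raises ValueError (space)
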
@@ -163,11 +162,10 @@ class Workflow:
      @property
      def dag_definition(self) -> str:
          """Return the string representation of the DAG object instantiation."""
-         return f"DAG(dag_id='{self.dag_name}', start_date=pendulum.today('UTC').add(days=-2), schedule=None, catchup=False, tags={self.dag_tags})"
+         return f"DAG(dag_id='{self.dag_name}', schedule=None, catchup=False, tags={self.dag_tags})"

      def initialize_local_dag(self):
          """Create a local instance of the DAG object."""
-         import pendulum
          from airflow import DAG

          return eval(self.dag_definition)
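
This hunk is the substance of the #78 bugfix. The old definition embedded start_date=pendulum.today('UTC').add(days=-2), which re-evaluates every time Airflow parses the generated DAG file, so the serialized DAG metadata drifted from day to day and registered as a new DAG version despite an unchanged workflow; since these DAGs use schedule=None, the start_date can simply be dropped. A minimal sketch of the instability that was removed:

    import pendulum  # no longer a dependency of this package; shown only for the old behavior

    # What the old generated DAG file evaluated at every scheduler parse:
    start_date = pendulum.today("UTC").add(days=-2)

    # Parsed on 2026-05-06 -> 2026-05-04T00:00:00+00:00
    # Parsed on 2026-05-07 -> 2026-05-05T00:00:00+00:00
    # Different metadata per parse => a "new" DAG version each day.
    print(start_date)
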
@@ -226,7 +224,7 @@ class Workflow:
              f.write("\n")
          return workflow_py

-     def export_notebook(self, path: str | Path | None = None):
+     def export_notebook(self, path: str | Path | None = None) -> Path:
          """Render the workflow as a jupyter notebook."""
          path = path or "notebooks/"
          path = Path(path)
@@ -241,7 +239,7 @@ class Workflow:
              nbf.write(nb, f)
          return notebook_ipynb

-     def topological_sort(self) -> [Node]:
+     def topological_sort(self) -> list[Node]:
          """Use a topological sort to find a valid linear order for task execution."""
          node_task_names = {node.task.__name__: node for node in self.nodes}
          node_upstream_tasks = {node.task: node.upstreams for node in self.nodes}
@@ -258,7 +256,6 @@ class Workflow:
              "",
              "from airflow import DAG",
              "from airflow.providers.standard.operators.bash import BashOperator",
-             "import pendulum",
              "",
              "from dkist_processing_core.failure_callback import chat_ops_notification",
              "",

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dkist-processing-core
- Version: 7.2.2rc1
+ Version: 7.3.0
  Summary: Abstraction layer used by the DKIST science data processing pipelines with Apache Airflow
  Author-email: NSO / AURA <dkistdc@nso.edu>
  License: BSD-3-Clause
@@ -13,10 +13,9 @@ Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.13
  Requires-Python: >=3.13
  Description-Content-Type: text/x-rst
- Requires-Dist: apache-airflow[celery,postgres]==3.1.8
+ Requires-Dist: apache-airflow[celery,postgres]==3.2.1
  Requires-Dist: requests>=2.23
  Requires-Dist: talus<2.0,>=1.3.4
- Requires-Dist: pendulum
  Requires-Dist: nbformat>=5.9.2
  Requires-Dist: notebook<8.0,>=7.5.5
  Requires-Dist: nbconvert<8.0,>=7.16.6
@@ -28,8 +27,6 @@ Requires-Dist: pytest-cov; extra == "test"
  Requires-Dist: pytest-mock; extra == "test"
  Requires-Dist: jinja2; extra == "test"
  Requires-Dist: towncrier; extra == "test"
- Requires-Dist: nbconvert; extra == "test"
- Requires-Dist: ipython; extra == "test"
  Provides-Extra: docs
  Requires-Dist: sphinx; extra == "docs"
  Requires-Dist: sphinx-astropy; extra == "docs"
@@ -178,6 +175,8 @@ Environment Variables
  - annotation=int required=False default=15 description='Timeout for pip installs in seconds.'
  * - PIP_RETRIES
  - annotation=int required=False default=5 description='Number of retries for pip installs.'
+ * - IS_NOTEBOOK_EXECUTION_ENVIRONMENT
+ - annotation=bool required=False default=False description='Indication of whether the code is running in a notebook execution environment.' examples=[True, False]

  Development
  -----------

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core.egg-info/SOURCES.txt
@@ -7,10 +7,6 @@ README.rst
  bitbucket-pipelines.yml
  pyproject.toml
  changelog/.gitempty
- changelog/72.misc.rst
- changelog/76.feature.1.rst
- changelog/76.feature.2.rst
- changelog/77.misc.rst
  dkist_processing_core/__init__.py
  dkist_processing_core/build_utils.py
  dkist_processing_core/config.py

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/dkist_processing_core.egg-info/requires.txt
@@ -1,7 +1,6 @@
- apache-airflow[celery,postgres]==3.1.8
+ apache-airflow[celery,postgres]==3.2.1
  requests>=2.23
  talus<2.0,>=1.3.4
- pendulum
  nbformat>=5.9.2
  notebook<8.0,>=7.5.5
  nbconvert<8.0,>=7.16.6
@@ -25,5 +24,3 @@ pytest-cov
  pytest-mock
  jinja2
  towncrier
- nbconvert
- ipython

{dkist_processing_core-7.2.2rc1 → dkist_processing_core-7.3.0}/pyproject.toml
@@ -21,10 +21,9 @@ classifiers = [
      "Programming Language :: Python :: 3.13",
  ]
  dependencies = [
-     "apache-airflow[postgres, celery] == 3.1.8",
+     "apache-airflow[postgres, celery] == 3.2.1",
      "requests >= 2.23",
      "talus >= 1.3.4, <2.0",
-     "pendulum",
      "nbformat >= 5.9.2",
      "notebook >= 7.5.5, < 8.0",
      "nbconvert >= 7.16.6, < 8.0",
@@ -45,8 +44,6 @@ test = [
      "pytest-mock",
      "jinja2",
      "towncrier",
-     "nbconvert",
-     "ipython",
  ]
  docs = [
      "sphinx",

dkist_processing_core-7.2.2rc1/changelog/72.misc.rst
@@ -1 +0,0 @@
- Add an environment variable to indicate when the execution environment is a Jupyter notebook.

dkist_processing_core-7.2.2rc1/changelog/76.feature.1.rst
@@ -1 +0,0 @@
- Add utility for working with the notebook build target which supports waiting for a notebook to autosave.

dkist_processing_core-7.2.2rc1/changelog/76.feature.2.rst
@@ -1 +0,0 @@
- Add utility for working with the notebook build target which supports exporting a notebook as markdown.

dkist_processing_core-7.2.2rc1/changelog/77.misc.rst
@@ -1 +0,0 @@
- Refactor the dag naming function to be accessible outside of the context of a Workflow instance.