dagstermill 0.20.2__tar.gz → 0.27.9__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. {dagstermill-0.20.2 → dagstermill-0.27.9}/LICENSE +1 -1
  2. dagstermill-0.27.9/PKG-INFO +35 -0
  3. dagstermill-0.27.9/README.md +4 -0
  4. dagstermill-0.27.9/dagstermill/__init__.py +27 -0
  5. dagstermill-0.27.9/dagstermill/__main__.py +3 -0
  6. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/asset_factory.py +27 -20
  7. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/cli.py +6 -7
  8. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/context.py +15 -11
  9. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/engine.py +2 -4
  10. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/errors.py +1 -1
  11. dagstermill-0.27.9/dagstermill/examples/__init__.py +1 -0
  12. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/examples/repository.py +8 -5
  13. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/factory.py +32 -28
  14. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/io_managers.py +7 -3
  15. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/manager.py +31 -20
  16. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/serialize.py +2 -2
  17. dagstermill-0.27.9/dagstermill/test_utils.py +45 -0
  18. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/translator.py +3 -3
  19. dagstermill-0.27.9/dagstermill/version.py +1 -0
  20. dagstermill-0.27.9/dagstermill.egg-info/PKG-INFO +35 -0
  21. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill.egg-info/SOURCES.txt +1 -0
  22. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill.egg-info/requires.txt +1 -1
  23. {dagstermill-0.20.2 → dagstermill-0.27.9}/setup.py +5 -6
  24. dagstermill-0.20.2/PKG-INFO +0 -15
  25. dagstermill-0.20.2/README.md +0 -4
  26. dagstermill-0.20.2/dagstermill/__init__.py +0 -26
  27. dagstermill-0.20.2/dagstermill/__main__.py +0 -3
  28. dagstermill-0.20.2/dagstermill/examples/__init__.py +0 -1
  29. dagstermill-0.20.2/dagstermill/version.py +0 -1
  30. dagstermill-0.20.2/dagstermill.egg-info/PKG-INFO +0 -15
  31. {dagstermill-0.20.2 → dagstermill-0.27.9}/MANIFEST.in +0 -0
  32. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/compat.py +0 -0
  33. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/py.typed +0 -0
  34. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill.egg-info/dependency_links.txt +0 -0
  35. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill.egg-info/entry_points.txt +0 -0
  36. {dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill.egg-info/top_level.txt +0 -0
  37. {dagstermill-0.20.2 → dagstermill-0.27.9}/setup.cfg +0 -0

{dagstermill-0.20.2 → dagstermill-0.27.9}/LICENSE
@@ -186,7 +186,7 @@
  same "printed page" as the copyright notice for easier
  identification within third-party archives.

- Copyright 2023 Elementl, Inc.
+ Copyright 2025 Dagster Labs, Inc.

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.

dagstermill-0.27.9/PKG-INFO (new file)
@@ -0,0 +1,35 @@
+ Metadata-Version: 2.4
+ Name: dagstermill
+ Version: 0.27.9
+ Summary: run notebooks using the Dagster tools
+ Author: Dagster Labs
+ Author-email: hello@dagsterlabs.com
+ License: Apache-2.0
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: License :: OSI Approved :: Apache Software License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.9,<3.14
+ License-File: LICENSE
+ Requires-Dist: dagster==1.11.9
+ Requires-Dist: ipykernel!=5.4.0,!=5.4.1,>=4.9.0
+ Requires-Dist: ipython_genutils>=0.2.0
+ Requires-Dist: packaging>=20.9
+ Requires-Dist: papermill>=1.0.0
+ Requires-Dist: scrapbook>=0.5.0
+ Requires-Dist: nbconvert
+ Requires-Dist: jupyter-client<8
+ Provides-Extra: test
+ Requires-Dist: matplotlib; extra == "test"
+ Requires-Dist: scikit-learn>=0.19.0; extra == "test"
+ Requires-Dist: tqdm<=4.48; extra == "test"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: license
+ Dynamic: license-file
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary

dagstermill-0.27.9/README.md (new file)
@@ -0,0 +1,4 @@
+ # dagstermill
+
+ The docs for `dagstermill` can be found
+ [here](https://docs.dagster.io/api/python-api/libraries/dagstermill).

dagstermill-0.27.9/dagstermill/__init__.py (new file)
@@ -0,0 +1,27 @@
+ from dagster_shared.libraries import DagsterLibraryRegistry
+
+ import dagstermill.factory as factory # noqa: F401
+ from dagstermill.asset_factory import define_dagstermill_asset as define_dagstermill_asset
+ from dagstermill.context import DagstermillExecutionContext as DagstermillExecutionContext
+ from dagstermill.errors import DagstermillError as DagstermillError
+ from dagstermill.factory import define_dagstermill_op as define_dagstermill_op
+ from dagstermill.io_managers import (
+ ConfigurableLocalOutputNotebookIOManager as ConfigurableLocalOutputNotebookIOManager,
+ local_output_notebook_io_manager as local_output_notebook_io_manager,
+ )
+ from dagstermill.manager import MANAGER_FOR_NOTEBOOK_INSTANCE as _MANAGER_FOR_NOTEBOOK_INSTANCE
+ from dagstermill.version import __version__ as __version__
+
+ DagsterLibraryRegistry.register("dagstermill", __version__)
+
+ get_context = _MANAGER_FOR_NOTEBOOK_INSTANCE.get_context
+
+ yield_result = _MANAGER_FOR_NOTEBOOK_INSTANCE.yield_result
+
+ yield_event = _MANAGER_FOR_NOTEBOOK_INSTANCE.yield_event
+
+ _reconstitute_job_context = _MANAGER_FOR_NOTEBOOK_INSTANCE.reconstitute_job_context
+
+ _teardown = _MANAGER_FOR_NOTEBOOK_INSTANCE.teardown_resources
+
+ _load_input_parameter = _MANAGER_FOR_NOTEBOOK_INSTANCE.load_input_parameter
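The new package __init__ re-exports the notebook-facing helpers (`get_context`, `yield_result`, `yield_event`) by binding them to the module-level manager instance. As a rough, hedged sketch of how a notebook cell typically calls them (the config value and output name below are illustrative, not taken from this package):

    # Inside a Jupyter notebook run by dagstermill (or opened interactively).
    import dagstermill

    # Build an execution context for interactive development; op_config is a placeholder.
    context = dagstermill.get_context(op_config=3)
    context.log.info(f"op config is {context.op_config}")

    # Hand a value back to Dagster as the op's default output.
    dagstermill.yield_result(context.op_config * 2, output_name="result")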

dagstermill-0.27.9/dagstermill/__main__.py (new file)
@@ -0,0 +1,3 @@
+ from dagstermill.cli import main
+
+ main()

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/asset_factory.py
@@ -1,6 +1,7 @@
  import pickle
  import tempfile
- from typing import Any, Callable, Iterable, Mapping, Optional, Sequence, Set, Type, Union, cast
+ from collections.abc import Iterable, Mapping
+ from typing import Any, Callable, Optional, Union, cast

  import dagster._check as check
  from dagster import (
@@ -16,11 +17,13 @@
  SourceAsset,
  asset,
  )
+ from dagster._annotations import beta, beta_param
  from dagster._config.pythonic_config import Config, infer_schema_from_config_class
- from dagster._config.pythonic_config.utils import safe_is_subclass
+ from dagster._config.pythonic_config.type_check_utils import safe_is_subclass
  from dagster._core.definitions.events import CoercibleToAssetKey, CoercibleToAssetKeyPrefix
- from dagster._core.definitions.utils import validate_tags
  from dagster._core.execution.context.compute import OpExecutionContext
+ from dagster._core.storage.tags import COMPUTE_KIND_TAG
+ from dagster._utils.tags import normalize_tags

  from dagstermill.factory import _clean_path_for_windows, execute_notebook

@@ -67,15 +70,17 @@
  return _t_fn


+ @beta
+ @beta_param(param="resource_defs")
  def define_dagstermill_asset(
  name: str,
  notebook_path: str,
  key_prefix: Optional[CoercibleToAssetKeyPrefix] = None,
  ins: Optional[Mapping[str, AssetIn]] = None,
- deps: Optional[Sequence[Union[CoercibleToAssetKey, AssetsDefinition, SourceAsset]]] = None,
+ deps: Optional[Iterable[Union[CoercibleToAssetKey, AssetsDefinition, SourceAsset]]] = None,
  metadata: Optional[Mapping[str, Any]] = None,
  config_schema: Optional[Union[Any, Mapping[str, Any]]] = None,
- required_resource_keys: Optional[Set[str]] = None,
+ required_resource_keys: Optional[set[str]] = None,
  resource_defs: Optional[Mapping[str, ResourceDefinition]] = None,
  description: Optional[str] = None,
  partitions_def: Optional[PartitionsDefinition] = None,
@@ -84,7 +89,8 @@
  io_manager_key: Optional[str] = None,
  retry_policy: Optional[RetryPolicy] = None,
  save_notebook_on_failure: bool = False,
- non_argument_deps: Optional[Union[Set[AssetKey], Set[str]]] = None,
+ non_argument_deps: Optional[Union[set[AssetKey], set[str]]] = None,
+ asset_tags: Optional[Mapping[str, Any]] = None,
  ) -> AssetsDefinition:
  """Creates a Dagster asset for a Jupyter notebook.

@@ -104,7 +110,7 @@
  if it does not. If not set, Dagster will accept any config provided for the op.
  metadata (Optional[Dict[str, Any]]): A dict of metadata entries for the asset.
  required_resource_keys (Optional[Set[str]]): Set of resource handles required by the notebook.
- description (Optional[str]): Description of the asset to display in Dagit.
+ description (Optional[str]): Description of the asset to display in the Dagster UI.
  partitions_def (Optional[PartitionsDefinition]): Defines the set of partition keys that
  compose the asset.
  op_tags (Optional[Dict[str, Any]]): A dictionary of tags for the op that computes the asset.
@@ -114,7 +120,7 @@
  group_name (Optional[str]): A string name used to organize multiple assets into groups. If not provided,
  the name "default" is used.
  resource_defs (Optional[Mapping[str, ResourceDefinition]]):
- (Experimental) A mapping of resource keys to resource definitions. These resources
+ (Beta) A mapping of resource keys to resource definitions. These resources
  will be initialized during execution, and can be accessed from the
  context within the notebook.
  io_manager_key (Optional[str]): A string key for the IO manager used to store the output notebook.
@@ -123,6 +129,7 @@
  save_notebook_on_failure (bool): If True and the notebook fails during execution, the failed notebook will be
  written to the Dagster storage directory. The location of the file will be printed in the Dagster logs.
  Defaults to False.
+ asset_tags (Optional[Dict[str, Any]]): A dictionary of tags to apply to the asset.
  non_argument_deps (Optional[Union[Set[AssetKey], Set[str]]]): Deprecated, use deps instead. Set of asset keys that are
  upstream dependencies, but do not pass an input to the asset.

@@ -172,27 +179,26 @@
  io_manager_key, "io_manager_key", default="output_notebook_io_manager"
  )

- user_tags = validate_tags(op_tags)
+ user_tags = normalize_tags(op_tags)
  if op_tags is not None:
  check.invariant(
  "notebook_path" not in op_tags,
- (
- "user-defined op tags contains the `notebook_path` key, but the `notebook_path` key"
- " is reserved for use by Dagster"
- ),
+ "user-defined op tags contains the `notebook_path` key, but the `notebook_path` key"
+ " is reserved for use by Dagster",
  )
  check.invariant(
- "kind" not in op_tags,
- (
- "user-defined op tags contains the `kind` key, but the `kind` key is reserved for"
- " use by Dagster"
- ),
+ COMPUTE_KIND_TAG not in op_tags,
+ f"user-defined op tags contains the `{COMPUTE_KIND_TAG}` key, but the `{COMPUTE_KIND_TAG}` key is reserved for"
+ " use by Dagster",
  )

- default_tags = {"notebook_path": _clean_path_for_windows(notebook_path), "kind": "ipynb"}
+ default_tags = {
+ "notebook_path": _clean_path_for_windows(notebook_path),
+ COMPUTE_KIND_TAG: "ipynb",
+ }

  if safe_is_subclass(config_schema, Config):
- config_schema = infer_schema_from_config_class(cast(Type[Config], config_schema))
+ config_schema = infer_schema_from_config_class(cast("type[Config]", config_schema))

  return asset(
  name=name,
@@ -211,6 +217,7 @@
  io_manager_key=io_mgr_key,
  retry_policy=retry_policy,
  non_argument_deps=non_argument_deps,
+ tags=asset_tags,
  )(
  _make_dagstermill_asset_compute_fn(
  name=name,
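The signature change above adds `asset_tags` next to the existing arguments. A hedged sketch of the updated call, assuming a notebook path, upstream asset key, and tag values that are purely illustrative:

    from dagster import AssetIn, file_relative_path
    from dagstermill import define_dagstermill_asset

    # Notebook-backed asset; "iris_dataset" is assumed to be an asset defined elsewhere
    # in the same code location.
    iris_kmeans_notebook = define_dagstermill_asset(
        name="iris_kmeans",
        notebook_path=file_relative_path(__file__, "notebooks/iris_kmeans.ipynb"),
        ins={"iris": AssetIn("iris_dataset")},
        asset_tags={"team": "data-science"},  # new in this version; applied to the asset
        save_notebook_on_failure=True,        # keep the failed notebook around for debugging
    )

The asset still needs an IO manager bound to the default "output_notebook_io_manager" resource key to store the executed notebook; see the io_managers.py section below.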

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/cli.py
@@ -1,13 +1,14 @@
  import copy
  import os
  import subprocess
- from typing import Mapping, Optional
+ from collections.abc import Mapping
+ from typing import Optional

  import click
  import dagster._check as check
  import nbformat
- from dagster._seven.json import loads
  from dagster._utils import mkdir_p, safe_isfile
+ from dagster_shared.seven.json import loads
  from papermill.iorw import load_notebook_node, write_ipynb


@@ -110,11 +111,9 @@

  if not force_overwrite and safe_isfile(notebook_path):
  click.confirm(
- (
- "Warning, {notebook_path} already exists and continuing "
- "will overwrite the existing notebook. "
- "Are you sure you want to continue?"
- ).format(notebook_path=notebook_path),
+ f"Warning, {notebook_path} already exists and continuing "
+ "will overwrite the existing notebook. "
+ "Are you sure you want to continue?",
  abort=True,
  )


{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/context.py
@@ -1,4 +1,5 @@
- from typing import AbstractSet, Any, Mapping, Optional, cast
+ from collections.abc import Mapping
+ from typing import AbstractSet, Any, Optional, cast # noqa: UP035

  from dagster import (
  DagsterRun,
@@ -6,15 +7,18 @@
  OpDefinition,
  _check as check,
  )
- from dagster._annotations import public
+ from dagster._annotations import beta, public
  from dagster._core.definitions.dependency import Node, NodeHandle
- from dagster._core.execution.context.compute import AbstractComputeExecutionContext
+ from dagster._core.definitions.repository_definition.repository_definition import (
+ RepositoryDefinition,
+ )
+ from dagster._core.execution.context.op_execution_context import AbstractComputeExecutionContext
  from dagster._core.execution.context.system import PlanExecutionContext, StepExecutionContext
  from dagster._core.log_manager import DagsterLogManager
  from dagster._core.system_config.objects import ResolvedRunConfig
- from dagster._utils.backcompat import deprecation_warning


+ @beta
  class DagstermillExecutionContext(AbstractComputeExecutionContext):
  """Dagstermill-specific execution context.

@@ -101,6 +105,11 @@
  """
  return self._job_def

+ @property
+ def repository_def(self) -> RepositoryDefinition:
+ """:class:`dagster.RepositoryDefinition`: The repository definition for the context."""
+ raise NotImplementedError
+
  @property
  def resources(self) -> Any:
  """collections.namedtuple: A dynamically-created type whose properties allow access to
@@ -114,7 +123,7 @@
  @property
  def run(self) -> DagsterRun:
  """:class:`dagster.DagsterRun`: The job run for the context."""
- return cast(DagsterRun, self._job_context.dagster_run)
+ return cast("DagsterRun", self._job_context.dagster_run)

  @property
  def log(self) -> DagsterLogManager:
@@ -132,7 +141,7 @@
  In interactive contexts, this may be a dagstermill-specific shim, depending whether an
  op definition was passed to ``dagstermill.get_context``.
  """
- return cast(OpDefinition, self._job_def.node_def_named(self.op_name))
+ return cast("OpDefinition", self._job_def.node_def_named(self.op_name))

  @property
  def node(self) -> Node:
@@ -141,11 +150,6 @@
  In interactive contexts, this may be a dagstermill-specific shim, depending whether an
  op definition was passed to ``dagstermill.get_context``.
  """
- deprecation_warning(
- "DagstermillExecutionContext.solid_def",
- "0.17.0",
- "use the 'op_def' property instead.",
- )
  return self.job_def.get_node(self.node_handle)

  @public
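For orientation, the properties touched in these hunks are the ones a notebook author usually reads off the context. An interactive sketch (logged values are illustrative; `repository_def` is new here but deliberately raises NotImplementedError for notebook contexts):

    import dagstermill

    context = dagstermill.get_context()
    context.log.info(f"shim op: {context.op_def.name}")  # OpDefinition backing the notebook
    context.log.info(f"run id: {context.run.run_id}")    # DagsterRun for this context
    print(context.resources)                             # namedtuple of initialized resources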

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/engine.py
@@ -1,7 +1,7 @@
  import nbformat
  from papermill.log import logger

- from .compat import ExecutionError, is_papermill_2
+ from dagstermill.compat import ExecutionError, is_papermill_2

  if is_papermill_2():
  from papermill.clientwrap import PapermillNotebookClient
@@ -86,9 +86,7 @@
  # the kernel down. Note that atexit doesn't seem to work at all in ipython, and hooking into
  # the ipython post_execute event doesn't work in papermill.
  def papermill_process(self, nb_man, resources):
- _, resources = super(DagstermillExecutePreprocessor, self).papermill_process(
- nb_man, resources
- )
+ _, resources = super().papermill_process(nb_man, resources)

  new_cell = nbformat.v4.new_code_cell(
  source="import dagstermill as __dm_dagstermill\n__dm_dagstermill._teardown()\n"

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/errors.py
@@ -1,4 +1,4 @@
- from dagster._core.errors import DagsterError
+ from dagster_shared.error import DagsterError


  class DagstermillError(DagsterError):

dagstermill-0.27.9/dagstermill/examples/__init__.py (new file)
@@ -0,0 +1 @@
+ from dagstermill.examples.repository import notebook_repo as notebook_repo

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/examples/repository.py
@@ -23,7 +23,7 @@
  with_resources,
  )
  from dagster._config.pythonic_config import Config
- from dagster._core.definitions.asset_graph import AssetGraph
+ from dagster._core.definitions.assets.graph.asset_graph import AssetGraph
  from dagster._core.definitions.utils import DEFAULT_OUTPUT
  from dagster._utils import PICKLE_PROTOCOL, file_relative_path

@@ -96,7 +96,9 @@

  @job(
  resource_defs={
- "output_notebook_io_manager": ConfigurableLocalOutputNotebookIOManager.configure_at_launch(),
+ "output_notebook_io_manager": (
+ ConfigurableLocalOutputNotebookIOManager.configure_at_launch()
+ ),
  "io_manager": FilesystemIOManager(),
  }
  )
@@ -301,12 +303,12 @@

  if DAGSTER_PANDAS_PRESENT and SKLEARN_PRESENT and MATPLOTLIB_PRESENT:
  # We need type-ignores here because type checkers don't understand the `*_PRESENT` kwargs.
- clean_data = test_nb_op("clean_data", outs={DEFAULT_OUTPUT: Out(DataFrame)}) # type: ignore
+ clean_data = test_nb_op("clean_data", outs={DEFAULT_OUTPUT: Out(DataFrame)}) # pyright: ignore[reportPossiblyUnboundVariable]

  # FIXME add an output to this
- tutorial_LR = test_nb_op("tutorial_LR", ins={"df": In(DataFrame)}) # type: ignore
+ tutorial_LR = test_nb_op("tutorial_LR", ins={"df": In(DataFrame)}) # pyright: ignore[reportPossiblyUnboundVariable]

- tutorial_RF = test_nb_op("tutorial_RF", ins={"df": In(DataFrame)}) # type: ignore
+ tutorial_RF = test_nb_op("tutorial_RF", ins={"df": In(DataFrame)}) # pyright: ignore[reportPossiblyUnboundVariable]

  @job(resource_defs=common_resource_defs)
  def tutorial_job():
@@ -568,6 +570,7 @@
  name="custom_io_mgr_key",
  notebook_path=nb_test_path("hello_world"),
  io_manager_key="my_custom_io_manager",
+ asset_tags={"foo": "bar"},
  )

  yield_event_asset = dagstermill.define_dagstermill_asset(

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/factory.py
@@ -4,7 +4,8 @@
  import sys
  import tempfile
  import uuid
- from typing import Any, Callable, Iterable, Mapping, Optional, Sequence, Set, Type, Union, cast
+ from collections.abc import Iterable, Mapping, Sequence
+ from typing import Any, Callable, Optional, Union, cast

  import nbformat
  import papermill
@@ -14,33 +15,35 @@
  Out,
  Output,
  _check as check,
- _seven,
  )
+ from dagster._annotations import beta
  from dagster._config.pythonic_config import Config, infer_schema_from_config_class
- from dagster._config.pythonic_config.utils import safe_is_subclass
+ from dagster._config.pythonic_config.type_check_utils import safe_is_subclass
  from dagster._core.definitions.events import AssetMaterialization, Failure, RetryRequested
  from dagster._core.definitions.metadata import MetadataValue
  from dagster._core.definitions.reconstruct import ReconstructableJob
- from dagster._core.definitions.utils import validate_tags
  from dagster._core.execution.context.compute import OpExecutionContext
  from dagster._core.execution.context.input import build_input_context
  from dagster._core.execution.context.system import StepExecutionContext
  from dagster._core.execution.plan.outputs import StepOutputHandle
+ from dagster._core.storage.tags import COMPUTE_KIND_TAG
  from dagster._serdes import pack_value
- from dagster._seven import get_system_temp_directory
  from dagster._utils import mkdir_p, safe_tempfile_path
  from dagster._utils.error import serializable_error_info_from_exc_info
+ from dagster._utils.tags import normalize_tags
+ from dagster_shared import seven
+ from dagster_shared.seven import get_system_temp_directory
  from papermill.engines import papermill_engines
  from papermill.iorw import load_notebook_node, write_ipynb

- from .compat import ExecutionError
- from .engine import DagstermillEngine
- from .errors import DagstermillError
- from .translator import DagsterTranslator
+ from dagstermill.compat import ExecutionError
+ from dagstermill.engine import DagstermillEngine
+ from dagstermill.errors import DagstermillError
+ from dagstermill.translator import DagsterTranslator


  def _clean_path_for_windows(notebook_path: str) -> str:
- """In windows, the notebook cant render in dagit unless the C: prefix is removed.
+ """In windows, the notebook can't render in the Dagster UI unless the C: prefix is removed.
  os.path.splitdrive will split the path into (drive, tail), so just return the tail.
  """
  return os.path.splitdrive(notebook_path)[1]
@@ -60,6 +63,7 @@
  # This is based on papermill.parameterize.parameterize_notebook
  # Typically, papermill injects the injected-parameters cell *below* the parameters cell
  # but we want to *replace* the parameters cell, which is what this function does.
+ @beta
  def replace_parameters(context, nb, parameters):
  """Assigned parameters into the appropriate place in the input notebook.

@@ -101,11 +105,12 @@
  after = nb.cells

  nb.cells = before + [newcell] + after
- nb.metadata.papermill["parameters"] = _seven.json.dumps(parameters)
+ nb.metadata.papermill["parameters"] = seven.json.dumps(parameters)

  return nb


+ @beta
  def get_papermill_parameters(
  step_context: StepExecutionContext,
  inputs: Mapping[str, object],
@@ -159,6 +164,7 @@
  return parameters


+ @beta
  def execute_notebook(
  step_context: StepExecutionContext,
  name: str,
@@ -199,9 +205,7 @@

  except Exception as ex:
  step_context.log.warn(
- "Error when attempting to materialize executed notebook: {exc}".format(
- exc=str(serializable_error_info_from_exc_info(sys.exc_info()))
- )
+ f"Error when attempting to materialize executed notebook: {serializable_error_info_from_exc_info(sys.exc_info())!s}"
  )

  if isinstance(ex, ExecutionError):
@@ -342,13 +346,14 @@
  return _t_fn


+ @beta
  def define_dagstermill_op(
  name: str,
  notebook_path: str,
  ins: Optional[Mapping[str, In]] = None,
  outs: Optional[Mapping[str, Out]] = None,
  config_schema: Optional[Union[Any, Mapping[str, Any]]] = None,
- required_resource_keys: Optional[Set[str]] = None,
+ required_resource_keys: Optional[set[str]] = None,
  output_notebook_name: Optional[str] = None,
  asset_key_prefix: Optional[Union[Sequence[str], str]] = None,
  description: Optional[str] = None,
@@ -403,7 +408,7 @@
  required_resource_keys.add(io_mgr_key)
  outs = {
  **outs,
- cast(str, output_notebook_name): Out(io_manager_key=io_mgr_key),
+ cast("str", output_notebook_name): Out(io_manager_key=io_mgr_key),
  }

  if isinstance(asset_key_prefix, str):
@@ -414,26 +419,25 @@
  default_description = f"This op is backed by the notebook at {notebook_path}"
  description = check.opt_str_param(description, "description", default=default_description)

- user_tags = validate_tags(tags)
+ user_tags = normalize_tags(tags)
  if tags is not None:
  check.invariant(
  "notebook_path" not in tags,
- (
- "user-defined op tags contains the `notebook_path` key, but the `notebook_path` key"
- " is reserved for use by Dagster"
- ),
+ "user-defined op tags contains the `notebook_path` key, but the `notebook_path` key"
+ " is reserved for use by Dagster",
  )
  check.invariant(
- "kind" not in tags,
- (
- "user-defined op tags contains the `kind` key, but the `kind` key is reserved for"
- " use by Dagster"
- ),
+ COMPUTE_KIND_TAG not in tags,
+ "user-defined op tags contains the `kind` key, but the `kind` key is reserved for"
+ " use by Dagster",
  )
- default_tags = {"notebook_path": _clean_path_for_windows(notebook_path), "kind": "ipynb"}
+ default_tags = {
+ "notebook_path": _clean_path_for_windows(notebook_path),
+ COMPUTE_KIND_TAG: "ipynb",
+ }

  if safe_is_subclass(config_schema, Config):
- config_schema = infer_schema_from_config_class(cast(Type[Config], config_schema))
+ config_schema = infer_schema_from_config_class(cast("type[Config]", config_schema))

  return OpDefinition(
  name=name,
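For reference, a hedged sketch of how `define_dagstermill_op` is typically wired into a job; the notebook path and names are illustrative:

    from dagster import file_relative_path, job
    from dagstermill import define_dagstermill_op, local_output_notebook_io_manager

    hello_world_op = define_dagstermill_op(
        name="hello_world",
        notebook_path=file_relative_path(__file__, "notebooks/hello_world.ipynb"),
        output_notebook_name="output_notebook",  # adds an Out handled by the io manager below
    )

    # The executed notebook is routed to the "output_notebook_io_manager" resource key,
    # so the job must bind an IO manager under that key.
    @job(resource_defs={"output_notebook_io_manager": local_output_notebook_io_manager})
    def hello_world_notebook_job():
        hello_world_op()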

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/io_managers.py
@@ -1,6 +1,7 @@
  import os
+ from collections.abc import Sequence
  from pathlib import Path
- from typing import Any, List, Optional, Sequence
+ from typing import Any, Optional

  import dagster._check as check
  from dagster import (
@@ -10,6 +11,7 @@
  InitResourceContext,
  IOManager,
  )
+ from dagster._annotations import beta
  from dagster._core.definitions.metadata import MetadataValue
  from dagster._core.execution.context.input import InputContext
  from dagster._core.execution.context.output import OutputContext
@@ -33,7 +35,7 @@

  class LocalOutputNotebookIOManager(OutputNotebookIOManager):
  def __init__(self, base_dir: str, asset_key_prefix: Optional[Sequence[str]] = None):
- super(LocalOutputNotebookIOManager, self).__init__(asset_key_prefix=asset_key_prefix)
+ super().__init__(asset_key_prefix=asset_key_prefix)
  self.base_dir = base_dir
  self.write_mode = "wb"
  self.read_mode = "rb"
@@ -82,6 +84,7 @@
  return file_obj.read()


+ @beta
  class ConfigurableLocalOutputNotebookIOManager(ConfigurableIOManagerFactory):
  """Built-in IO Manager for handling output notebook."""

@@ -92,7 +95,7 @@
  " directory if not provided."
  ),
  )
- asset_key_prefix: List[str] = Field(
+ asset_key_prefix: list[str] = Field(
  default=[],
  description=(
  "Asset key prefix to apply to assets materialized for output notebooks. Defaults to no"
@@ -111,6 +114,7 @@
  )


+ @beta
  @dagster_maintained_io_manager
  @io_manager(config_schema=ConfigurableLocalOutputNotebookIOManager.to_config_schema())
  def local_output_notebook_io_manager(init_context) -> LocalOutputNotebookIOManager:
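A small configuration sketch for the factory above; the directory and prefix values are illustrative:

    from dagstermill import ConfigurableLocalOutputNotebookIOManager

    # base_dir falls back to a default storage directory when omitted; asset_key_prefix
    # prefixes the asset key recorded for each materialized output notebook.
    notebook_io_manager = ConfigurableLocalOutputNotebookIOManager(
        base_dir="/tmp/notebook_outputs",
        asset_key_prefix=["notebooks"],
    )

This object is normally bound under the "output_notebook_io_manager" resource key, which is the default key looked up by both `define_dagstermill_asset` and `define_dagstermill_op`.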

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/manager.py
@@ -1,7 +1,9 @@
  import os
  import pickle
  import uuid
- from typing import AbstractSet, Any, Mapping, Optional, cast
+ from asyncio import AbstractEventLoop
+ from collections.abc import Mapping
+ from typing import TYPE_CHECKING, AbstractSet, Any, Optional, cast # noqa: UP035

  from dagster import (
  AssetMaterialization,
@@ -14,12 +16,12 @@
  TypeCheck,
  _check as check,
  )
+ from dagster._annotations import beta
  from dagster._core.definitions.dependency import NodeHandle
  from dagster._core.definitions.events import RetryRequested
  from dagster._core.definitions.graph_definition import GraphDefinition
  from dagster._core.definitions.job_base import InMemoryJob
  from dagster._core.definitions.job_definition import JobDefinition
- from dagster._core.definitions.node_definition import NodeDefinition
  from dagster._core.definitions.op_definition import OpDefinition
  from dagster._core.definitions.reconstruct import ReconstructableJob
  from dagster._core.definitions.resource_definition import ScopedResourcesBuilder
@@ -27,7 +29,7 @@
  from dagster._core.execution.api import create_execution_plan, scoped_job_context
  from dagster._core.execution.plan.outputs import StepOutputHandle
  from dagster._core.execution.plan.plan import ExecutionPlan
- from dagster._core.execution.plan.step import ExecutionStep
+ from dagster._core.execution.plan.state import KnownExecutionState
  from dagster._core.execution.resources_init import (
  get_required_resource_keys_to_init,
  resource_initialization_event_generator,
@@ -42,9 +44,13 @@
  from dagster._serdes import unpack_value
  from dagster._utils import EventGenerationManager

- from .context import DagstermillExecutionContext, DagstermillRuntimeExecutionContext
- from .errors import DagstermillError
- from .serialize import PICKLE_PROTOCOL
+ from dagstermill.context import DagstermillExecutionContext, DagstermillRuntimeExecutionContext
+ from dagstermill.errors import DagstermillError
+ from dagstermill.serialize import PICKLE_PROTOCOL
+
+ if TYPE_CHECKING:
+ from dagster._core.definitions.node_definition import NodeDefinition
+ from dagster._core.execution.plan.step import ExecutionStep


  class DagstermillResourceEventGenerationManager(EventGenerationManager):
@@ -57,14 +63,10 @@
  return iter(())

  def teardown(self):
- return [
- teardown_event
- for teardown_event in super(
- DagstermillResourceEventGenerationManager, self
- ).generate_teardown_events()
- ]
+ return [teardown_event for teardown_event in super().generate_teardown_events()]


+ @beta
  class Manager:
  def __init__(self):
  self.job = None
@@ -84,6 +86,7 @@
  resource_keys_to_init: Optional[AbstractSet[str]],
  instance: Optional[DagsterInstance],
  emit_persistent_events: Optional[bool],
+ event_loop: Optional[AbstractEventLoop],
  ):
  """Drop-in replacement for
  `dagster._core.execution.resources_init.resource_initialization_manager`. It uses a
@@ -98,6 +101,7 @@
  resource_keys_to_init=resource_keys_to_init,
  instance=instance,
  emit_persistent_events=emit_persistent_events,
+ event_loop=event_loop,
  )
  self.resource_manager = DagstermillResourceEventGenerationManager(
  generator, ScopedResourcesBuilder
@@ -175,6 +179,12 @@
  # Set this flag even though we're not in test for clearer error reporting
  raise_on_error=True,
  ) as job_context:
+ known_state = None
+ if dagster_run.parent_run_id:
+ known_state = KnownExecutionState.build_for_reexecution(
+ instance=instance,
+ parent_run=check.not_none(instance.get_run_by_id(dagster_run.parent_run_id)),
+ )
  self.context = DagstermillRuntimeExecutionContext(
  job_context=job_context,
  job_def=job_def,
@@ -186,9 +196,10 @@
  op_name=op.name,
  node_handle=node_handle,
  step_context=cast(
- StepExecutionContext,
+ "StepExecutionContext",
  job_context.for_step(
- cast(ExecutionStep, execution_plan.get_step_by_key(step_key))
+ cast("ExecutionStep", execution_plan.get_step_by_key(step_key)),
+ known_state=known_state,
  ),
  ),
  )
@@ -301,18 +312,18 @@
  # deferred import for perf
  import scrapbook

- if not self.op_def.has_output(output_name):
+ if not self.op_def.has_output(output_name): # pyright: ignore[reportOptionalMemberAccess]
  raise DagstermillError(
- f"Op {self.op_def.name} does not have output named {output_name}.Expected one of"
- f" {[str(output_def.name) for output_def in self.op_def.output_defs]}"
+ f"Op {self.op_def.name} does not have output named {output_name}.Expected one of" # pyright: ignore[reportOptionalMemberAccess]
+ f" {[str(output_def.name) for output_def in self.op_def.output_defs]}" # pyright: ignore[reportOptionalMemberAccess]
  )

  # pass output value cross process boundary using io manager
- step_context = self.context._step_context # noqa: SLF001
+ step_context = self.context._step_context # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess]
  # Note: yield_result currently does not support DynamicOutput

  # dagstermill assets do not support yielding additional results within the notebook:
- if len(step_context.job_def.asset_layer.asset_keys) > 0:
+ if len(step_context.job_def.asset_layer.executable_asset_keys) > 0: # pyright: ignore[reportArgumentType]
  raise DagstermillError(
  "dagstermill assets do not currently support dagstermill.yield_result"
  )
@@ -359,7 +370,7 @@
  import scrapbook

  event_id = f"event-{uuid.uuid4()}"
- out_file_path = os.path.join(self.marshal_dir, event_id)
+ out_file_path = os.path.join(self.marshal_dir, event_id) # pyright: ignore[reportCallIssue,reportArgumentType]
  with open(out_file_path, "wb") as fd:
  fd.write(pickle.dumps(dagster_event, PICKLE_PROTOCOL))

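The manager is also what backs `dagstermill.yield_event`: per the hunk above, the event is pickled into the manager's marshal directory and replayed by the op after the notebook finishes. A hedged notebook-side sketch (asset key and metadata are illustrative):

    import dagstermill
    from dagster import AssetMaterialization, MetadataValue

    # Record a materialization from inside the notebook.
    dagstermill.yield_event(
        AssetMaterialization(
            asset_key="trained_model",
            metadata={"accuracy": MetadataValue.float(0.93)},
        )
    )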

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/serialize.py
@@ -1,11 +1,11 @@
- from dagster import _seven
+ from dagster_shared import seven

  PICKLE_PROTOCOL = 2


  def is_json_serializable(value):
  try:
- _seven.json.dumps(value)
+ seven.json.dumps(value)
  return True
  except TypeError:
  return False

dagstermill-0.27.9/dagstermill/test_utils.py (new file)
@@ -0,0 +1,45 @@
+ import os
+ from contextlib import contextmanager
+
+ from dagster import execute_job
+ from dagster._core.definitions.metadata import NotebookMetadataValue, PathMetadataValue
+ from dagster._core.definitions.reconstruct import ReconstructableJob
+ from dagster._core.test_utils import instance_for_test
+
+
+ def get_path(materialization_event):
+ for value in materialization_event.event_specific_data.materialization.metadata.values():
+ if isinstance(value, (PathMetadataValue, NotebookMetadataValue)):
+ return value.value
+
+
+ def cleanup_result_notebook(result):
+ if not result:
+ return
+ materialization_events = [
+ x for x in result.all_events if x.event_type_value == "ASSET_MATERIALIZATION"
+ ]
+ for materialization_event in materialization_events:
+ result_path = get_path(materialization_event)
+ if os.path.exists(result_path): # pyright: ignore[reportArgumentType]
+ os.unlink(result_path) # pyright: ignore[reportArgumentType]
+
+
+ @contextmanager
+ def exec_for_test(fn_name, env=None, raise_on_error=True, **kwargs):
+ result = None
+ recon_job = ReconstructableJob.for_module("dagstermill.examples.repository", fn_name)
+
+ with instance_for_test() as instance:
+ try:
+ with execute_job(
+ job=recon_job,
+ run_config=env,
+ instance=instance,
+ raise_on_error=raise_on_error,
+ **kwargs,
+ ) as result:
+ yield result
+ finally:
+ if result:
+ cleanup_result_notebook(result)
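The new helper wraps `execute_job` in a context manager and cleans up any output notebooks materialized during the run. A usage sketch; the job name must be importable from `dagstermill.examples.repository`, and `hello_world_job` is assumed here:

    from dagstermill.test_utils import exec_for_test

    def test_hello_world_job():
        with exec_for_test("hello_world_job") as result:
            assert result.success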

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill/translator.py
@@ -1,6 +1,6 @@
  import papermill
  import papermill.translators
- from dagster import _seven
+ from dagster_shared import seven

  RESERVED_INPUT_NAMES = [
  "__dm_context",
@@ -16,7 +16,7 @@

  INJECTED_BOILERPLATE = """
  # Injected parameters
- from dagster import seven as __dm_seven
+ from dagster_shared import seven as __dm_seven
  import dagstermill as __dm_dagstermill
  context = __dm_dagstermill._reconstitute_job_context(
  **{{
@@ -51,7 +51,7 @@
  )

  for key in job_context_args:
- job_context_args[key] = _seven.json.dumps(job_context_args[key])
+ job_context_args[key] = seven.json.dumps(job_context_args[key])

  content = INJECTED_BOILERPLATE.format(job_context_args=job_context_args)


dagstermill-0.27.9/dagstermill/version.py (new file)
@@ -0,0 +1 @@
+ __version__ = "0.27.9"

dagstermill-0.27.9/dagstermill.egg-info/PKG-INFO (new file)
@@ -0,0 +1,35 @@
+ Metadata-Version: 2.4
+ Name: dagstermill
+ Version: 0.27.9
+ Summary: run notebooks using the Dagster tools
+ Author: Dagster Labs
+ Author-email: hello@dagsterlabs.com
+ License: Apache-2.0
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: License :: OSI Approved :: Apache Software License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.9,<3.14
+ License-File: LICENSE
+ Requires-Dist: dagster==1.11.9
+ Requires-Dist: ipykernel!=5.4.0,!=5.4.1,>=4.9.0
+ Requires-Dist: ipython_genutils>=0.2.0
+ Requires-Dist: packaging>=20.9
+ Requires-Dist: papermill>=1.0.0
+ Requires-Dist: scrapbook>=0.5.0
+ Requires-Dist: nbconvert
+ Requires-Dist: jupyter-client<8
+ Provides-Extra: test
+ Requires-Dist: matplotlib; extra == "test"
+ Requires-Dist: scikit-learn>=0.19.0; extra == "test"
+ Requires-Dist: tqdm<=4.48; extra == "test"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: license
+ Dynamic: license-file
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill.egg-info/SOURCES.txt
@@ -16,6 +16,7 @@
  dagstermill/manager.py
  dagstermill/py.typed
  dagstermill/serialize.py
+ dagstermill/test_utils.py
  dagstermill/translator.py
  dagstermill/version.py
  dagstermill.egg-info/PKG-INFO

{dagstermill-0.20.2 → dagstermill-0.27.9}/dagstermill.egg-info/requires.txt
@@ -1,4 +1,4 @@
- dagster==1.4.2
+ dagster==1.11.9
  ipykernel!=5.4.0,!=5.4.1,>=4.9.0
  ipython_genutils>=0.2.0
  packaging>=20.9

{dagstermill-0.20.2 → dagstermill-0.27.9}/setup.py
@@ -1,11 +1,10 @@
  from pathlib import Path
- from typing import Dict

  from setuptools import find_packages, setup


  def get_version() -> str:
- version: Dict[str, str] = {}
+ version: dict[str, str] = {}
  with open(Path(__file__).parent / "dagstermill/version.py", encoding="utf8") as fp:
  exec(fp.read(), version)

@@ -19,20 +18,20 @@
  name="dagstermill",
  version=ver,
  description="run notebooks using the Dagster tools",
- author="Elementl",
- author_email="hello@elementl.com",
+ author="Dagster Labs",
+ author_email="hello@dagsterlabs.com",
  license="Apache-2.0",
  packages=find_packages(exclude=["dagstermill_tests*"]),
  classifiers=[
- "Programming Language :: Python :: 3.8",
  "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "License :: OSI Approved :: Apache Software License",
  "Operating System :: OS Independent",
  ],
+ python_requires=">=3.9,<3.14",
  install_requires=[
- "dagster==1.4.2",
+ "dagster==1.11.9",
  # ipykernel 5.4.0 and 5.4.1 broke papermill
  # see https://github.com/dagster-io/dagster/issues/3401,
  # https://github.com/nteract/papermill/issues/519,

dagstermill-0.20.2/PKG-INFO (removed)
@@ -1,15 +0,0 @@
- Metadata-Version: 2.1
- Name: dagstermill
- Version: 0.20.2
- Summary: run notebooks using the Dagster tools
- Author: Elementl
- Author-email: hello@elementl.com
- License: Apache-2.0
- Classifier: Programming Language :: Python :: 3.8
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Operating System :: OS Independent
- Provides-Extra: test
- License-File: LICENSE

dagstermill-0.20.2/README.md (removed)
@@ -1,4 +0,0 @@
- # dagstermill
-
- The docs for `dagstermill` can be found
- [here](https://docs.dagster.io/_apidocs/libraries/dagstermill).

dagstermill-0.20.2/dagstermill/__init__.py (removed)
@@ -1,26 +0,0 @@
- from dagster._core.libraries import DagsterLibraryRegistry
-
- from .asset_factory import define_dagstermill_asset as define_dagstermill_asset
- from .context import DagstermillExecutionContext as DagstermillExecutionContext
- from .errors import DagstermillError as DagstermillError
- from .factory import define_dagstermill_op as define_dagstermill_op
- from .io_managers import (
- ConfigurableLocalOutputNotebookIOManager as ConfigurableLocalOutputNotebookIOManager,
- local_output_notebook_io_manager as local_output_notebook_io_manager,
- )
- from .manager import MANAGER_FOR_NOTEBOOK_INSTANCE as _MANAGER_FOR_NOTEBOOK_INSTANCE
- from .version import __version__ as __version__
-
- DagsterLibraryRegistry.register("dagstermill", __version__)
-
- get_context = _MANAGER_FOR_NOTEBOOK_INSTANCE.get_context
-
- yield_result = _MANAGER_FOR_NOTEBOOK_INSTANCE.yield_result
-
- yield_event = _MANAGER_FOR_NOTEBOOK_INSTANCE.yield_event
-
- _reconstitute_job_context = _MANAGER_FOR_NOTEBOOK_INSTANCE.reconstitute_job_context
-
- _teardown = _MANAGER_FOR_NOTEBOOK_INSTANCE.teardown_resources
-
- _load_input_parameter = _MANAGER_FOR_NOTEBOOK_INSTANCE.load_input_parameter

dagstermill-0.20.2/dagstermill/__main__.py (removed)
@@ -1,3 +0,0 @@
- from .cli import main
-
- main()

dagstermill-0.20.2/dagstermill/examples/__init__.py (removed)
@@ -1 +0,0 @@
- from .repository import notebook_repo as notebook_repo

dagstermill-0.20.2/dagstermill/version.py (removed)
@@ -1 +0,0 @@
- __version__ = "0.20.2"

dagstermill-0.20.2/dagstermill.egg-info/PKG-INFO (removed)
@@ -1,15 +0,0 @@
- Metadata-Version: 2.1
- Name: dagstermill
- Version: 0.20.2
- Summary: run notebooks using the Dagster tools
- Author: Elementl
- Author-email: hello@elementl.com
- License: Apache-2.0
- Classifier: Programming Language :: Python :: 3.8
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Operating System :: OS Independent
- Provides-Extra: test
- License-File: LICENSE

The remaining files in the listing (MANIFEST.in, dagstermill/compat.py, dagstermill/py.typed, dagstermill.egg-info/dependency_links.txt, dagstermill.egg-info/entry_points.txt, dagstermill.egg-info/top_level.txt, and setup.cfg) are unchanged between the two versions.