dagstermill 0.19.2__py3-none-any.whl → 0.19.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dagstermill might be problematic.

dagstermill/__init__.py CHANGED
@@ -19,7 +19,7 @@ yield_result = _MANAGER_FOR_NOTEBOOK_INSTANCE.yield_result
 
 yield_event = _MANAGER_FOR_NOTEBOOK_INSTANCE.yield_event
 
-_reconstitute_pipeline_context = _MANAGER_FOR_NOTEBOOK_INSTANCE.reconstitute_pipeline_context
+_reconstitute_job_context = _MANAGER_FOR_NOTEBOOK_INSTANCE.reconstitute_job_context
 
 _teardown = _MANAGER_FOR_NOTEBOOK_INSTANCE.teardown_resources
 
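The renamed hook is private but load-bearing: it is the entry point the dagstermill translator writes into every output notebook's injected-parameters cell. A minimal sketch of the visible effect, assuming dagstermill 0.19.3 is importable:

import dagstermill

# The injected-notebook entry point was renamed in this release; output
# notebooks generated by 0.19.2 reference the old private name and will
# raise AttributeError if their injected cell is re-executed as-is.
assert hasattr(dagstermill, "_reconstitute_job_context")
assert not hasattr(dagstermill, "_reconstitute_pipeline_context")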
dagstermill/context.py CHANGED
@@ -23,17 +23,15 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
 
     def __init__(
         self,
-        pipeline_context: PlanExecutionContext,
-        pipeline_def: JobDefinition,
+        job_context: PlanExecutionContext,
+        job_def: JobDefinition,
         resource_keys_to_init: AbstractSet[str],
         op_name: str,
         node_handle: NodeHandle,
         op_config: Any = None,
     ):
-        self._pipeline_context = check.inst_param(
-            pipeline_context, "pipeline_context", PlanExecutionContext
-        )
-        self._pipeline_def = check.inst_param(pipeline_def, "pipeline_def", JobDefinition)
+        self._job_context = check.inst_param(job_context, "job_context", PlanExecutionContext)
+        self._job_def = check.inst_param(job_def, "job_def", JobDefinition)
         self._resource_keys_to_init = check.set_param(
             resource_keys_to_init, "resource_keys_to_init", of_type=str
         )
@@ -51,7 +49,7 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
             bool
         """
         check.str_param(key, "key")
-        return self._pipeline_context.has_tag(key)
+        return self._job_context.has_tag(key)
 
     def get_tag(self, key: str) -> Optional[str]:
         """Get a logging tag defined on the context.
@@ -63,44 +61,35 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
             str
         """
         check.str_param(key, "key")
-        return self._pipeline_context.get_tag(key)
+        return self._job_context.get_tag(key)
 
     @public
     @property
     def run_id(self) -> str:
         """str: The run_id for the context."""
-        return self._pipeline_context.run_id
+        return self._job_context.run_id
 
     @public
     @property
     def run_config(self) -> Mapping[str, Any]:
         """dict: The run_config for the context."""
-        return self._pipeline_context.run_config
+        return self._job_context.run_config
 
     @property
     def resolved_run_config(self) -> ResolvedRunConfig:
         """:class:`dagster.ResolvedRunConfig`: The resolved_run_config for the context."""
-        return self._pipeline_context.resolved_run_config
+        return self._job_context.resolved_run_config
 
     @public
     @property
     def logging_tags(self) -> Mapping[str, str]:
         """dict: The logging tags for the context."""
-        return self._pipeline_context.logging_tags
+        return self._job_context.logging_tags
 
     @public
     @property
     def job_name(self) -> str:
-        return self._pipeline_context.job_name
-
-    @property
-    def pipeline_name(self) -> str:
-        deprecation_warning(
-            "DagstermillExecutionContext.pipeline_name",
-            "0.17.0",
-            "use the 'job_name' property instead.",
-        )
-        return self.job_name
+        return self._job_context.job_name
 
     @public
     @property
@@ -109,34 +98,14 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
 
         This will be a dagstermill-specific shim.
         """
-        return cast(
-            JobDefinition,
-            check.inst(
-                self._pipeline_def,
-                JobDefinition,
-                "Accessing job_def inside a legacy pipeline. Use pipeline_def instead.",
-            ),
-        )
-
-    @property
-    def pipeline_def(self) -> JobDefinition:
-        """:class:`dagster.PipelineDefinition`: The pipeline definition for the context.
-
-        This will be a dagstermill-specific shim.
-        """
-        deprecation_warning(
-            "DagstermillExecutionContext.pipeline_def",
-            "0.17.0",
-            "use the 'job_def' property instead.",
-        )
-        return self._pipeline_def
+        return self._job_def
 
     @property
     def resources(self) -> Any:
         """collections.namedtuple: A dynamically-created type whose properties allow access to
         resources.
         """
-        return self._pipeline_context.scoped_resources_builder.build(
+        return self._job_context.scoped_resources_builder.build(
             required_resource_keys=self._resource_keys_to_init,
         )
 
@@ -144,16 +113,7 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
     @property
     def run(self) -> DagsterRun:
         """:class:`dagster.DagsterRun`: The job run for the context."""
-        return cast(DagsterRun, self._pipeline_context.dagster_run)
-
-    @property
-    def pipeline_run(self) -> DagsterRun:
-        deprecation_warning(
-            "DagstermillExecutionContext.pipeline_run",
-            "0.17.0",
-            "use the 'run' property instead.",
-        )
-        return self.run
+        return cast(DagsterRun, self._job_context.dagster_run)
 
     @property
     def log(self) -> DagsterLogManager:
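With the deprecated shims above removed rather than merely warned about, notebook code must use the job-flavored properties. A hedged migration sketch (assumes an interactive dagstermill context; `get_context` is the documented entry point):

import dagstermill

context = dagstermill.get_context()

job_name = context.job_name  # formerly context.pipeline_name, removed here
job_def = context.job_def    # formerly context.pipeline_def, removed here
run = context.run            # formerly context.pipeline_run, removed here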
@@ -161,7 +121,7 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
 
         Call, e.g., ``log.info()`` to log messages through the Dagster machinery.
         """
-        return self._pipeline_context.log
+        return self._job_context.log
 
     @public
     @property
@@ -171,7 +131,7 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
         In interactive contexts, this may be a dagstermill-specific shim, depending whether an
         op definition was passed to ``dagstermill.get_context``.
         """
-        return cast(OpDefinition, self._pipeline_def.node_def_named(self.op_name))
+        return cast(OpDefinition, self._job_def.node_def_named(self.op_name))
 
     @property
     def node(self) -> Node:
@@ -185,7 +145,7 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
             "0.17.0",
             "use the 'op_def' property instead.",
         )
-        return self.pipeline_def.get_node(self.node_handle)
+        return self.job_def.get_node(self.node_handle)
 
     @public
     @property
@@ -203,8 +163,8 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
 class DagstermillRuntimeExecutionContext(DagstermillExecutionContext):
     def __init__(
         self,
-        pipeline_context: PlanExecutionContext,
-        pipeline_def: JobDefinition,
+        job_context: PlanExecutionContext,
+        job_def: JobDefinition,
         resource_keys_to_init: AbstractSet[str],
         op_name: str,
         step_context: StepExecutionContext,
@@ -213,8 +173,8 @@ class DagstermillRuntimeExecutionContext(DagstermillExecutionContext):
     ):
         self._step_context = check.inst_param(step_context, "step_context", StepExecutionContext)
         super().__init__(
-            pipeline_context,
-            pipeline_def,
+            job_context,
+            job_def,
             resource_keys_to_init,
             op_name,
             node_handle,
dagstermill/examples/repository.py CHANGED
@@ -574,8 +574,8 @@ yield_event_asset = dagstermill.define_dagstermill_asset(
 )
 
 
-# this is hacky. We need a ReconstructablePipeline to run dagstermill, and
-# ReconstructablePipeline.for_module() find the jobs defined in this file. So we need to resolve all
+# this is hacky. We need a ReconstructableJob to run dagstermill, and
+# ReconstructableJob.for_module() find the jobs defined in this file. So we need to resolve all
 # of the asset jobs outside of the repository function.
 assets = with_resources(
     [
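The comment's constraint is the same one factory.py enforces below: dagstermill can only execute notebooks inside a job that dagster can reconstruct by module path. A hedged sketch of what that means for callers (module and job names are hypothetical):

from dagster import DagsterInstance, execute_job, reconstructable

# my_package.repository and my_notebook_job are illustrative placeholders.
from my_package.repository import my_notebook_job

result = execute_job(
    reconstructable(my_notebook_job),  # wraps the job so subprocesses can re-import it
    instance=DagsterInstance.get(),
)
assert result.success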
dagstermill/factory.py CHANGED
@@ -20,7 +20,7 @@ from dagster._config.pythonic_config import Config, infer_schema_from_config_cla
 from dagster._config.pythonic_config.utils import safe_is_subclass
 from dagster._core.definitions.events import AssetMaterialization, Failure, RetryRequested
 from dagster._core.definitions.metadata import MetadataValue
-from dagster._core.definitions.reconstruct import ReconstructablePipeline
+from dagster._core.definitions.reconstruct import ReconstructableJob
 from dagster._core.definitions.utils import validate_tags
 from dagster._core.execution.context.compute import OpExecutionContext
 from dagster._core.execution.context.input import build_input_context
@@ -123,7 +123,7 @@ def get_papermill_parameters(
     marshal_dir = os.path.normpath(os.path.join(temp_dir, "dagstermill", str(run_id), "marshal"))
     mkdir_p(marshal_dir)
 
-    if not isinstance(step_context.pipeline, ReconstructablePipeline):
+    if not isinstance(step_context.job, ReconstructableJob):
         if compute_descriptor == "asset":
             raise DagstermillError(
                 "Can't execute a dagstermill asset that is not reconstructable. "
@@ -135,7 +135,7 @@ def get_papermill_parameters(
             "Use the reconstructable() function if executing from python"
         )
 
-    dm_executable_dict = step_context.pipeline.to_dict()
+    dm_executable_dict = step_context.job.to_dict()
 
     dm_context_dict = {
         "output_log_path": output_log_path,
dagstermill/io_managers.py CHANGED
@@ -93,7 +93,7 @@ class ConfigurableLocalOutputNotebookIOManager(ConfigurableIOManagerFactory):
         ),
     )
     asset_key_prefix: List[str] = Field(
-        [],
+        default=[],
         description=(
             "Asset key prefix to apply to assets materialized for output notebooks. Defaults to no"
             " prefix."
dagstermill/manager.py CHANGED
@@ -17,14 +17,14 @@ from dagster import (
 from dagster._core.definitions.dependency import NodeHandle
 from dagster._core.definitions.events import RetryRequested
 from dagster._core.definitions.graph_definition import GraphDefinition
+from dagster._core.definitions.job_base import InMemoryJob
 from dagster._core.definitions.job_definition import JobDefinition
 from dagster._core.definitions.node_definition import NodeDefinition
 from dagster._core.definitions.op_definition import OpDefinition
-from dagster._core.definitions.pipeline_base import InMemoryPipeline
-from dagster._core.definitions.reconstruct import ReconstructablePipeline
+from dagster._core.definitions.reconstruct import ReconstructableJob
 from dagster._core.definitions.resource_definition import ScopedResourcesBuilder
 from dagster._core.events import DagsterEvent
-from dagster._core.execution.api import scoped_pipeline_context
+from dagster._core.execution.api import scoped_job_context
 from dagster._core.execution.plan.outputs import StepOutputHandle
 from dagster._core.execution.plan.plan import ExecutionPlan
 from dagster._core.execution.plan.step import ExecutionStep
@@ -35,7 +35,7 @@ from dagster._core.execution.resources_init import (
 from dagster._core.instance import DagsterInstance
 from dagster._core.instance.ref import InstanceRef
 from dagster._core.log_manager import DagsterLogManager
-from dagster._core.storage.pipeline_run import DagsterRun, DagsterRunStatus
+from dagster._core.storage.dagster_run import DagsterRun, DagsterRunStatus
 from dagster._core.system_config.objects import ResolvedRunConfig, ResourceConfig
 from dagster._core.utils import make_new_run_id
 from dagster._loggers import colored_console_logger
@@ -67,9 +67,9 @@ class DagstermillResourceEventGenerationManager(EventGenerationManager):
 
 class Manager:
     def __init__(self):
-        self.pipeline = None
+        self.job = None
         self.op_def: Optional[NodeDefinition] = None
-        self.in_pipeline: bool = False
+        self.in_job: bool = False
         self.marshal_dir: Optional[str] = None
         self.context = None
         self.resource_manager = None
@@ -104,10 +104,10 @@ class Manager:
         )
         return self.resource_manager
 
-    def reconstitute_pipeline_context(
+    def reconstitute_job_context(
         self,
         executable_dict: Mapping[str, Any],
-        pipeline_run_dict: Mapping[str, Any],
+        job_run_dict: Mapping[str, Any],
         node_handle_kwargs: Mapping[str, Any],
         instance_ref_dict: Mapping[str, Any],
         step_key: str,
@@ -117,26 +117,26 @@ class Manager:
     ):
         """Reconstitutes a context for dagstermill-managed execution.
 
-        You'll see this function called to reconstruct a pipeline context within the ``injected
+        You'll see this function called to reconstruct a job context within the ``injected
         parameters`` cell of a dagstermill output notebook. Users should not call this function
         interactively except when debugging output notebooks.
 
         Use :func:`dagstermill.get_context` in the ``parameters`` cell of your notebook to define a
         context for interactive exploration and development. This call will be replaced by one to
-        :func:`dagstermill.reconstitute_pipeline_context` when the notebook is executed by
+        :func:`dagstermill.reconstitute_job_context` when the notebook is executed by
         dagstermill.
         """
         check.opt_str_param(output_log_path, "output_log_path")
         check.opt_str_param(marshal_dir, "marshal_dir")
         run_config = check.opt_mapping_param(run_config, "run_config", key_type=str)
-        check.mapping_param(pipeline_run_dict, "pipeline_run_dict")
+        check.mapping_param(job_run_dict, "job_run_dict")
         check.mapping_param(executable_dict, "executable_dict")
         check.mapping_param(node_handle_kwargs, "node_handle_kwargs")
         check.mapping_param(instance_ref_dict, "instance_ref_dict")
         check.str_param(step_key, "step_key")
 
-        pipeline = ReconstructablePipeline.from_dict(executable_dict)
-        pipeline_def = pipeline.get_definition()
+        job = ReconstructableJob.from_dict(executable_dict)
+        job_def = job.get_definition()
 
         try:
             instance_ref = unpack_value(instance_ref_dict, InstanceRef)
@@ -146,49 +146,49 @@ class Manager:
                 "Error when attempting to resolve DagsterInstance from serialized InstanceRef"
             ) from err
 
-        dagster_run = unpack_value(pipeline_run_dict, DagsterRun)
+        dagster_run = unpack_value(job_run_dict, DagsterRun)
 
         node_handle = NodeHandle.from_dict(node_handle_kwargs)
-        op = pipeline_def.get_node(node_handle)
+        op = job_def.get_node(node_handle)
         op_def = op.definition
 
         self.marshal_dir = marshal_dir
-        self.in_pipeline = True
+        self.in_job = True
         self.op_def = op_def
-        self.pipeline = pipeline
+        self.job = job
 
-        resolved_run_config = ResolvedRunConfig.build(pipeline_def, run_config)
+        resolved_run_config = ResolvedRunConfig.build(job_def, run_config)
 
         execution_plan = ExecutionPlan.build(
-            self.pipeline,
+            self.job,
             resolved_run_config,
             step_keys_to_execute=dagster_run.step_keys_to_execute,
         )
 
-        with scoped_pipeline_context(
+        with scoped_job_context(
             execution_plan,
-            pipeline,
+            job,
             run_config,
             dagster_run,
             instance,
             scoped_resources_builder_cm=self._setup_resources,
             # Set this flag even though we're not in test for clearer error reporting
             raise_on_error=True,
-        ) as pipeline_context:
+        ) as job_context:
             self.context = DagstermillRuntimeExecutionContext(
-                pipeline_context=pipeline_context,
-                pipeline_def=pipeline_def,
+                job_context=job_context,
+                job_def=job_def,
                 op_config=run_config.get("ops", {}).get(op.name, {}).get("config"),
                 resource_keys_to_init=get_required_resource_keys_to_init(
                     execution_plan,
-                    pipeline_def,
+                    job_def,
                     resolved_run_config,
                 ),
                 op_name=op.name,
                 node_handle=node_handle,
                 step_context=cast(
                     StepExecutionContext,
-                    pipeline_context.for_step(
+                    job_context.for_step(
                         cast(ExecutionStep, execution_plan.get_step_by_key(step_key))
                     ),
                 ),
@@ -238,7 +238,7 @@ class Manager:
             required_resource_keys=set(resource_defs.keys()),
         )
 
-        pipeline_def = JobDefinition(
+        job_def = JobDefinition(
             graph_def=GraphDefinition(name="ephemeral_dagstermill_pipeline", node_defs=[op_def]),
             logger_defs=logger_defs,
             resource_defs=resource_defs,
@@ -246,11 +246,11 @@ class Manager:
 
         run_id = make_new_run_id()
 
-        # construct stubbed PipelineRun for notebook exploration...
-        # The actual pipeline run during pipeline execution will be serialized and reconstituted
-        # in the `reconstitute_pipeline_context` call
-        pipeline_run = DagsterRun(
-            pipeline_name=pipeline_def.name,
+        # construct stubbed DagsterRun for notebook exploration...
+        # The actual dagster run during job execution will be serialized and reconstituted
+        # in the `reconstitute_job_context` call
+        dagster_run = DagsterRun(
+            job_name=job_def.name,
             run_id=run_id,
             run_config=run_config,
             step_keys_to_execute=None,
@@ -258,30 +258,30 @@ class Manager:
             tags=None,
         )
 
-        self.in_pipeline = False
+        self.in_job = False
         self.op_def = op_def
-        self.pipeline = pipeline_def
+        self.job = job_def
 
-        resolved_run_config = ResolvedRunConfig.build(pipeline_def, run_config)
+        resolved_run_config = ResolvedRunConfig.build(job_def, run_config)
 
-        pipeline = InMemoryPipeline(pipeline_def)
-        execution_plan = ExecutionPlan.build(pipeline, resolved_run_config)
+        job = InMemoryJob(job_def)
+        execution_plan = ExecutionPlan.build(job, resolved_run_config)
 
-        with scoped_pipeline_context(
+        with scoped_job_context(
             execution_plan,
-            pipeline,
+            job,
             run_config,
-            pipeline_run,
+            dagster_run,
             DagsterInstance.ephemeral(),
             scoped_resources_builder_cm=self._setup_resources,
-        ) as pipeline_context:
+        ) as job_context:
             self.context = DagstermillExecutionContext(
-                pipeline_context=pipeline_context,
-                pipeline_def=pipeline_def,
+                job_context=job_context,
+                job_def=job_def,
                 op_config=op_config,
                 resource_keys_to_init=get_required_resource_keys_to_init(
                     execution_plan,
-                    pipeline_def,
+                    job_def,
                     resolved_run_config,
                 ),
                 op_name=op_def.name,
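This second scoped_job_context block serves get_context, which fabricates the in-memory job and stubbed DagsterRun above so a notebook can be explored without a real run. A hedged usage sketch in a notebook's parameters cell (the op_config payload is illustrative):

import dagstermill

# Builds the ephemeral JobDefinition and stubbed DagsterRun shown above.
context = dagstermill.get_context(op_config={"date": "2023-05-01"})
context.log.info(f"exploring run {context.run_id} of job {context.job_name}")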
@@ -299,7 +299,7 @@ class Manager:
             value (Any): The value to yield.
             output_name (Optional[str]): The name of the result to yield (default: ``'result'``).
         """
-        if not self.in_pipeline:
+        if not self.in_job:
             return value
 
         # deferred import for perf
@@ -356,7 +356,7 @@ class Manager:
                 f" type, one of {valid_types}."
             )
 
-        if not self.in_pipeline:
+        if not self.in_job:
             return dagster_event
 
         # deferred import for perf
dagstermill/translator.py CHANGED
@@ -18,11 +18,11 @@ INJECTED_BOILERPLATE = """
 # Injected parameters
 from dagster import seven as __dm_seven
 import dagstermill as __dm_dagstermill
-context = __dm_dagstermill._reconstitute_pipeline_context(
+context = __dm_dagstermill._reconstitute_job_context(
     **{{
         key: __dm_seven.json.loads(value)
         for key, value
-        in {pipeline_context_args}.items()
+        in {job_context_args}.items()
     }}
 )
 """
@@ -41,19 +41,19 @@ class DagsterTranslator(papermill.translators.PythonTranslator):
         assert "__dm_input_names" in parameters
 
         context_args = parameters["__dm_context"]
-        pipeline_context_args = dict(
+        job_context_args = dict(
             executable_dict=parameters["__dm_executable_dict"],
-            pipeline_run_dict=parameters["__dm_pipeline_run_dict"],
+            job_run_dict=parameters["__dm_pipeline_run_dict"],
             node_handle_kwargs=parameters["__dm_node_handle_kwargs"],
             instance_ref_dict=parameters["__dm_instance_ref_dict"],
             step_key=parameters["__dm_step_key"],
             **context_args,
         )
 
-        for key in pipeline_context_args:
-            pipeline_context_args[key] = _seven.json.dumps(pipeline_context_args[key])
+        for key in job_context_args:
+            job_context_args[key] = _seven.json.dumps(job_context_args[key])
 
-        content = INJECTED_BOILERPLATE.format(pipeline_context_args=pipeline_context_args)
+        content = INJECTED_BOILERPLATE.format(job_context_args=job_context_args)
 
         for input_name in parameters["__dm_input_names"]:
             dm_load_input_call = f"__dm_dagstermill._load_input_parameter('{input_name}')"
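Note that the serialized parameter key `__dm_pipeline_run_dict` keeps its old name; only the keyword argument it feeds (`job_run_dict`) was renamed. For the boilerplate above to be injected at all, dagstermill registers this translator with papermill for Python kernels; a sketch of that registration, assuming papermill's public translator registry:

import papermill

from dagstermill.translator import DagsterTranslator

# Route Python-kernel parameter translation through DagsterTranslator so the
# injected-parameters cell calls _reconstitute_job_context instead of plain
# variable assignments.
papermill.translators.papermill_translators.register("python", DagsterTranslator)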
dagstermill/version.py CHANGED
@@ -1 +1 @@
-__version__ = "0.19.2"
+__version__ = "0.19.3"
dagstermill-0.19.2.dist-info/METADATA → dagstermill-0.19.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dagstermill
-Version: 0.19.2
+Version: 0.19.3
 Summary: run notebooks using the Dagster tools
 Author: Elementl
 Author-email: hello@elementl.com
@@ -13,7 +13,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
 License-File: LICENSE
-Requires-Dist: dagster (==1.3.2)
+Requires-Dist: dagster (==1.3.3)
 Requires-Dist: ipykernel (!=5.4.0,!=5.4.1,>=4.9.0)
 Requires-Dist: ipython-genutils (>=0.2.0)
 Requires-Dist: packaging (>=20.9)
dagstermill-0.19.3.dist-info/RECORD ADDED
@@ -0,0 +1,22 @@
+dagstermill/__init__.py,sha256=WZRYSjL1yAM1w5cqmC1t2T7hvuW-RGGY5tV9vZGHZwU,1133
+dagstermill/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30
+dagstermill/asset_factory.py,sha256=Qz2vcCjGLUE0IyUn9ceGSTV9f_eTojWhm89jMjxYg8U,9134
+dagstermill/cli.py,sha256=NluBLUhAcf47DaQ7o1W9IhF9Ps8HCUc6v5xMYqO_BBk,4569
+dagstermill/compat.py,sha256=GCfUaGC3eIEhlZP_VFtua8hFKNtwXocOAfYxuKZ5X3I,526
+dagstermill/context.py,sha256=uyyfwMxxuataxLlwBS3SV1AtC5Y8_-TwC45GQ17oxok,5965
+dagstermill/engine.py,sha256=4CZW-eni4TnG8C8VhPkZZgj6djItdOUKhHPzvaVhaYo,5843
+dagstermill/errors.py,sha256=WOmpAGp-J1XhyGK_LT3ZZKsBwF5dvrWbqSaSldtoh6Y,141
+dagstermill/factory.py,sha256=Hl-X6AiQ3RXRF7piW5atWxq7AwM2zD3MUmIz5EohTiE,18747
+dagstermill/io_managers.py,sha256=kk36zAvxnnDot6he9CBTm8-VTDsbx9v1MUY6BaZeeKQ,4258
+dagstermill/manager.py,sha256=_XrERGVBDfgKEOT7GeOqRm4kdSYkFe3by6pPTVarNr4,15497
+dagstermill/serialize.py,sha256=eXW3c26CiILT_uebyFcAKBnsiNxnjyGT_ch3PfGyjek,188
+dagstermill/translator.py,sha256=h1VPAOWtdjLKzFjRmyN9hO_R6qJuAETNkJydfdgwWGM,2170
+dagstermill/version.py,sha256=LpBqSbK1KuDbSUey7PHJzdDwxD-diUSlq4z4kUt_6cU,23
+dagstermill/examples/__init__.py,sha256=kzan-9zFjxaJ8o9bqUso44gcGiOmJrlq4JYO-yIBQao,55
+dagstermill/examples/repository.py,sha256=OY0x-4nJRBKxOsS1hrFFhx9cRX0NQ0YdYvXgg5n9Dco,15725
+dagstermill-0.19.3.dist-info/LICENSE,sha256=-gtoVIAZYUHYmNHISZg982FI4Oh19mV1nxgTVW8eCB8,11344
+dagstermill-0.19.3.dist-info/METADATA,sha256=uS9-dMCqtniDBVR8BbcwMCTImZQP_1ST1M7rD3z-F54,1017
+dagstermill-0.19.3.dist-info/WHEEL,sha256=p46_5Uhzqz6AzeSosiOnxK-zmFja1i22CrQCjmYe8ec,92
+dagstermill-0.19.3.dist-info/entry_points.txt,sha256=885a7vvhABYWEj7W28elkzSmIcKO3REkdd5h4Z4DEJs,53
+dagstermill-0.19.3.dist-info/top_level.txt,sha256=YDelJKdA5YIIrjsObdd8U4E9YhuXJLRe9NKfUzud9Uc,12
+dagstermill-0.19.3.dist-info/RECORD,,
dagstermill-0.19.2.dist-info/RECORD DELETED
@@ -1,22 +0,0 @@
-dagstermill/__init__.py,sha256=FGZPY1zbdJ50WBzxhPWnIN8yEda57S0iuwOJN4ODBDk,1143
-dagstermill/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30
-dagstermill/asset_factory.py,sha256=Qz2vcCjGLUE0IyUn9ceGSTV9f_eTojWhm89jMjxYg8U,9134
-dagstermill/cli.py,sha256=NluBLUhAcf47DaQ7o1W9IhF9Ps8HCUc6v5xMYqO_BBk,4569
-dagstermill/compat.py,sha256=GCfUaGC3eIEhlZP_VFtua8hFKNtwXocOAfYxuKZ5X3I,526
-dagstermill/context.py,sha256=VjurhMFLMPGfUC-xH6xpN6AcUING6omEm4QtiZAFO74,7240
-dagstermill/engine.py,sha256=4CZW-eni4TnG8C8VhPkZZgj6djItdOUKhHPzvaVhaYo,5843
-dagstermill/errors.py,sha256=WOmpAGp-J1XhyGK_LT3ZZKsBwF5dvrWbqSaSldtoh6Y,141
-dagstermill/factory.py,sha256=EqVAHOoLy67KvyLbYekqACCOXFYmQyEXgBlOazCzNbA,18767
-dagstermill/io_managers.py,sha256=8542cAYEEOR7_5dor5C49NPBl8S_RP8ITk9Qz7KOV5w,4250
-dagstermill/manager.py,sha256=n3O5exnLs6BO9uMoBkgczsN3jaHGZUfHAkfuLBnN3Qw,15777
-dagstermill/serialize.py,sha256=eXW3c26CiILT_uebyFcAKBnsiNxnjyGT_ch3PfGyjek,188
-dagstermill/translator.py,sha256=LpnD_4-4SgRAsHPh7REywkiXRr_yvoszcECk_d_JuUQ,2215
-dagstermill/version.py,sha256=qZNYBPCf5y9eOSOCnc71mXgsDo0Xwb9v1HTFq7X3sdU,23
-dagstermill/examples/__init__.py,sha256=kzan-9zFjxaJ8o9bqUso44gcGiOmJrlq4JYO-yIBQao,55
-dagstermill/examples/repository.py,sha256=74dXL42t3utBz81OEtbvPzM2t_PwTDiSEjUaOoehS8M,15735
-dagstermill-0.19.2.dist-info/LICENSE,sha256=-gtoVIAZYUHYmNHISZg982FI4Oh19mV1nxgTVW8eCB8,11344
-dagstermill-0.19.2.dist-info/METADATA,sha256=A9ZaaULpWjyfqX44upGIVvuuAUTniJSu1_JeK7nmQE4,1017
-dagstermill-0.19.2.dist-info/WHEEL,sha256=p46_5Uhzqz6AzeSosiOnxK-zmFja1i22CrQCjmYe8ec,92
-dagstermill-0.19.2.dist-info/entry_points.txt,sha256=885a7vvhABYWEj7W28elkzSmIcKO3REkdd5h4Z4DEJs,53
-dagstermill-0.19.2.dist-info/top_level.txt,sha256=YDelJKdA5YIIrjsObdd8U4E9YhuXJLRe9NKfUzud9Uc,12
-dagstermill-0.19.2.dist-info/RECORD,,