dagstermill 0.17.19__py3-none-any.whl → 0.17.21__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dagstermill might be problematic.

dagstermill/__init__.py CHANGED
@@ -1,4 +1,4 @@
-from dagster._core.utils import check_dagster_package_version
+from dagster._core.libraries import DagsterLibraryRegistry
 
 from .asset_factory import define_dagstermill_asset as define_dagstermill_asset
 from .context import DagstermillExecutionContext as DagstermillExecutionContext
@@ -8,7 +8,7 @@ from .io_managers import local_output_notebook_io_manager as local_output_notebo
 from .manager import MANAGER_FOR_NOTEBOOK_INSTANCE as _MANAGER_FOR_NOTEBOOK_INSTANCE
 from .version import __version__ as __version__
 
-check_dagster_package_version("dagstermill", __version__)
+DagsterLibraryRegistry.register("dagstermill", __version__)
 
 get_context = _MANAGER_FOR_NOTEBOOK_INSTANCE.get_context
 
dagstermill/context.py CHANGED
@@ -27,9 +27,9 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
         pipeline_context: PlanExecutionContext,
         pipeline_def: PipelineDefinition,
         resource_keys_to_init: AbstractSet[str],
-        solid_name: str,
-        solid_handle: NodeHandle,
-        solid_config: Any = None,
+        op_name: str,
+        node_handle: NodeHandle,
+        op_config: Any = None,
     ):
         self._pipeline_context = check.inst_param(
             pipeline_context, "pipeline_context", PlanExecutionContext
@@ -38,9 +38,9 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
         self._resource_keys_to_init = check.set_param(
             resource_keys_to_init, "resource_keys_to_init", of_type=str
         )
-        self.solid_name = check.str_param(solid_name, "solid_name")
-        self.solid_handle = check.inst_param(solid_handle, "solid_handle", NodeHandle)
-        self._solid_config = solid_config
+        self.op_name = check.str_param(op_name, "op_name")
+        self.node_handle = check.inst_param(node_handle, "node_handle", NodeHandle)
+        self._op_config = op_config
 
     def has_tag(self, key: str) -> bool:
         """Check if a logging tag is defined on the context.
@@ -145,7 +145,7 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
     @property
     def run(self) -> DagsterRun:
         """:class:`dagster.DagsterRun`: The job run for the context."""
-        return cast(DagsterRun, self._pipeline_context.pipeline_run)
+        return cast(DagsterRun, self._pipeline_context.dagster_run)
 
     @property
     def pipeline_run(self) -> DagsterRun:
@@ -172,35 +172,21 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
         In interactive contexts, this may be a dagstermill-specific shim, depending whether an
         op definition was passed to ``dagstermill.get_context``.
         """
-        return cast(OpDefinition, self._pipeline_def.solid_def_named(self.solid_name))
+        return cast(OpDefinition, self._pipeline_def.solid_def_named(self.op_name))
 
     @property
-    def solid_def(self) -> OpDefinition:
-        """:class:`dagster.SolidDefinition`: The solid definition for the context.
+    def node(self) -> Node:
+        """:class:`dagster.Node`: The node for the context.
 
-        In interactive contexts, this may be a dagstermill-specific shim, depending whether a
-        solid definition was passed to ``dagstermill.get_context``.
-        """
-        deprecation_warning(
-            "DagstermillExecutionContext.solid_def",
-            "0.17.0",
-            "use the 'op_def' property instead.",
-        )
-        return cast(OpDefinition, self._pipeline_def.solid_def_named(self.solid_name))
-
-    @property
-    def solid(self) -> Node:
-        """:class:`dagster.Node`: The solid for the context.
-
-        In interactive contexts, this may be a dagstermill-specific shim, depending whether a
-        solid definition was passed to ``dagstermill.get_context``.
+        In interactive contexts, this may be a dagstermill-specific shim, depending whether an
+        op definition was passed to ``dagstermill.get_context``.
         """
         deprecation_warning(
             "DagstermillExecutionContext.solid_def",
             "0.17.0",
             "use the 'op_def' property instead.",
         )
-        return self.pipeline_def.get_solid(self.solid_handle)
+        return self.pipeline_def.get_solid(self.node_handle)
 
     @public
     @property
@@ -208,23 +194,11 @@ class DagstermillExecutionContext(AbstractComputeExecutionContext):
         """collections.namedtuple: A dynamically-created type whose properties allow access to
        op-specific config.
         """
-        if self._solid_config:
-            return self._solid_config
-
-        solid_config = self.resolved_run_config.solids.get(self.solid_name)
-        return solid_config.config if solid_config else None
+        if self._op_config:
+            return self._op_config
 
-    @property
-    def solid_config(self) -> Any:
-        """collections.namedtuple: A dynamically-created type whose properties allow access to
-        solid-specific config.
-        """
-        deprecation_warning(
-            "DagstermillExecutionContext.solid_config",
-            "0.17.0",
-            "use the 'op_config' property instead.",
-        )
-        return self.op_config
+        op_config = self.resolved_run_config.ops.get(self.op_name)
+        return op_config.config if op_config else None
 
 
 class DagstermillRuntimeExecutionContext(DagstermillExecutionContext):
@@ -233,19 +207,19 @@ class DagstermillRuntimeExecutionContext(DagstermillExecutionContext):
         pipeline_context: PlanExecutionContext,
         pipeline_def: PipelineDefinition,
         resource_keys_to_init: AbstractSet[str],
-        solid_name: str,
+        op_name: str,
         step_context: StepExecutionContext,
-        solid_handle: NodeHandle,
-        solid_config: Any = None,
+        node_handle: NodeHandle,
+        op_config: Any = None,
     ):
         self._step_context = check.inst_param(step_context, "step_context", StepExecutionContext)
         super().__init__(
             pipeline_context,
             pipeline_def,
             resource_keys_to_init,
-            solid_name,
-            solid_handle,
-            solid_config,
+            op_name,
+            node_handle,
+            op_config,
         )
 
     @property
dagstermill/factory.py CHANGED
@@ -141,15 +141,15 @@ def get_papermill_parameters(
         "run_config": step_context.run_config,
     }
 
-    dm_solid_handle_kwargs = step_context.solid_handle._asdict()
+    dm_node_handle_kwargs = step_context.node_handle._asdict()
     dm_step_key = step_context.step.key
 
    parameters = {}
 
    parameters["__dm_context"] = dm_context_dict
    parameters["__dm_executable_dict"] = dm_executable_dict
-    parameters["__dm_pipeline_run_dict"] = pack_value(step_context.pipeline_run)
-    parameters["__dm_solid_handle_kwargs"] = dm_solid_handle_kwargs
+    parameters["__dm_pipeline_run_dict"] = pack_value(step_context.dagster_run)
+    parameters["__dm_node_handle_kwargs"] = dm_node_handle_kwargs
    parameters["__dm_instance_ref_dict"] = pack_value(step_context.instance.get_ref())
    parameters["__dm_step_key"] = dm_step_key
    parameters["__dm_input_names"] = list(inputs.keys())
@@ -235,7 +235,7 @@ def _handle_events_from_notebook(
 
    output_nb = scrapbook.read_notebook(executed_notebook_path)
 
-    for output_name, _ in step_context.solid_def.output_dict.items():
+    for output_name, _ in step_context.op_def.output_dict.items():
        data_dict = output_nb.scraps.data_dict
        if output_name in data_dict:
            # read outputs that were passed out of process via io manager from `yield_result`
dagstermill/manager.py CHANGED
@@ -32,6 +32,7 @@ from dagster._core.execution.resources_init import (
     resource_initialization_event_generator,
 )
 from dagster._core.instance import DagsterInstance
+from dagster._core.instance.ref import InstanceRef
 from dagster._core.log_manager import DagsterLogManager
 from dagster._core.storage.pipeline_run import DagsterRun, DagsterRunStatus
 from dagster._core.system_config.objects import ResolvedRunConfig, ResourceConfig
@@ -39,8 +40,8 @@ from dagster._core.utils import make_new_run_id
 from dagster._legacy import Materialization, ModeDefinition, PipelineDefinition
 from dagster._loggers import colored_console_logger
 from dagster._serdes import unpack_value
-from dagster._utils import EventGenerationManager, ensure_gen
-from dagster._utils.backcompat import canonicalize_backcompat_args, deprecation_warning
+from dagster._utils import EventGenerationManager
+from dagster._utils.backcompat import deprecation_warning
 
 from .context import DagstermillExecutionContext, DagstermillRuntimeExecutionContext
 from .errors import DagstermillError
@@ -68,7 +69,7 @@ class DagstermillResourceEventGenerationManager(EventGenerationManager):
 class Manager:
     def __init__(self):
         self.pipeline = None
-        self.solid_def: Optional[NodeDefinition] = None
+        self.op_def: Optional[NodeDefinition] = None
         self.in_pipeline: bool = False
         self.marshal_dir: Optional[str] = None
         self.context = None
@@ -80,7 +81,7 @@ class Manager:
         resource_configs: Mapping[str, ResourceConfig],
         log_manager: DagsterLogManager,
         execution_plan: Optional[ExecutionPlan],
-        pipeline_run: Optional[DagsterRun],
+        dagster_run: Optional[DagsterRun],
         resource_keys_to_init: Optional[AbstractSet[str]],
         instance: Optional[DagsterInstance],
         emit_persistent_events: Optional[bool],
@@ -95,7 +96,7 @@ class Manager:
             resource_configs=resource_configs,
             log_manager=log_manager,
             execution_plan=execution_plan,
-            pipeline_run=pipeline_run,
+            dagster_run=dagster_run,
             resource_keys_to_init=resource_keys_to_init,
             instance=instance,
             emit_persistent_events=emit_persistent_events,
@@ -108,8 +109,8 @@ class Manager:
     def reconstitute_pipeline_context(
         self,
         executable_dict: Mapping[str, Any],
-        pipeline_run_dict: Mapping[str, DagsterRun],
-        solid_handle_kwargs: Mapping[str, Any],
+        pipeline_run_dict: Mapping[str, Any],
+        node_handle_kwargs: Mapping[str, Any],
         instance_ref_dict: Mapping[str, Any],
         step_key: str,
         output_log_path: Optional[str] = None,
@@ -132,7 +133,7 @@ class Manager:
         run_config = check.opt_mapping_param(run_config, "run_config", key_type=str)
         check.mapping_param(pipeline_run_dict, "pipeline_run_dict")
         check.mapping_param(executable_dict, "executable_dict")
-        check.mapping_param(solid_handle_kwargs, "solid_handle_kwargs")
+        check.mapping_param(node_handle_kwargs, "node_handle_kwargs")
         check.mapping_param(instance_ref_dict, "instance_ref_dict")
         check.str_param(step_key, "step_key")
 
@@ -140,22 +141,22 @@ class Manager:
         pipeline_def = pipeline.get_definition()
 
         try:
-            instance_ref = unpack_value(instance_ref_dict)
+            instance_ref = cast(InstanceRef, unpack_value(instance_ref_dict))
             instance = DagsterInstance.from_ref(instance_ref)
         except Exception as err:
             raise DagstermillError(
                 "Error when attempting to resolve DagsterInstance from serialized InstanceRef"
             ) from err
 
-        pipeline_run = unpack_value(pipeline_run_dict)
+        pipeline_run = cast(DagsterRun, unpack_value(pipeline_run_dict))
 
-        solid_handle = NodeHandle.from_dict(solid_handle_kwargs)
-        solid = pipeline_def.get_solid(solid_handle)
-        solid_def = solid.definition
+        node_handle = NodeHandle.from_dict(node_handle_kwargs)
+        op = pipeline_def.get_solid(node_handle)
+        op_def = op.definition
 
         self.marshal_dir = marshal_dir
         self.in_pipeline = True
-        self.solid_def = solid_def
+        self.op_def = op_def
         self.pipeline = pipeline
 
         resolved_run_config = ResolvedRunConfig.build(
@@ -181,14 +182,14 @@ class Manager:
         self.context = DagstermillRuntimeExecutionContext(
             pipeline_context=pipeline_context,
             pipeline_def=pipeline_def,
-            solid_config=run_config.get("ops", {}).get(solid.name, {}).get("config"),
+            op_config=run_config.get("ops", {}).get(op.name, {}).get("config"),
             resource_keys_to_init=get_required_resource_keys_to_init(
                 execution_plan,
                 pipeline_def,
                 resolved_run_config,
             ),
-            solid_name=solid.name,
-            solid_handle=solid_handle,
+            op_name=op.name,
+            node_handle=node_handle,
             step_context=cast(
                 StepExecutionContext,
                 pipeline_context.for_step(
@@ -204,7 +205,6 @@ class Manager:
         op_config: Any = None,
         resource_defs: Optional[Mapping[str, ResourceDefinition]] = None,
         logger_defs: Optional[Mapping[str, LoggerDefinition]] = None,
-        solid_config: Any = None,
         mode_def: Optional[ModeDefinition] = None,
         run_config: Optional[dict] = None,
     ) -> DagstermillExecutionContext:
@@ -236,10 +236,6 @@ class Manager:
                 " `dagstermill.get_context`. Please provide one or the other."
             )
 
-        solid_config = canonicalize_backcompat_args(
-            op_config, "op_config", solid_config, "solid_config", "0.17.0"
-        )
-
         if mode_def:
             deprecation_warning(
                 "mode_def argument to dagstermill.get_context",
@@ -265,15 +261,15 @@ class Manager:
             resource_defs = check.opt_mapping_param(resource_defs, "resource_defs")
             mode_def = ModeDefinition(logger_defs=logger_defs, resource_defs=resource_defs)
 
-        solid_def = OpDefinition(
-            name="this_solid",
+        op_def = OpDefinition(
+            name="this_op",
             compute_fn=lambda *args, **kwargs: None,
-            description="Ephemeral solid constructed by dagstermill.get_context()",
+            description="Ephemeral op constructed by dagstermill.get_context()",
             required_resource_keys=mode_def.resource_key_set,
         )
 
         pipeline_def = PipelineDefinition(
-            [solid_def], mode_defs=[mode_def], name="ephemeral_dagstermill_pipeline"
+            [op_def], mode_defs=[mode_def], name="ephemeral_dagstermill_pipeline"
        )
 
        run_id = make_new_run_id()
@@ -292,7 +288,7 @@ class Manager:
        )
 
        self.in_pipeline = False
-        self.solid_def = solid_def
+        self.op_def = op_def
        self.pipeline = pipeline_def
 
        resolved_run_config = ResolvedRunConfig.build(pipeline_def, run_config, mode=mode_def.name)
@@ -311,14 +307,14 @@ class Manager:
        self.context = DagstermillExecutionContext(
            pipeline_context=pipeline_context,
            pipeline_def=pipeline_def,
-            solid_config=solid_config,
+            op_config=op_config,
            resource_keys_to_init=get_required_resource_keys_to_init(
                execution_plan,
                pipeline_def,
                resolved_run_config,
            ),
-            solid_name=solid_def.name,
-            solid_handle=NodeHandle(solid_def.name, parent=None),
+            op_name=op_def.name,
+            node_handle=NodeHandle(op_def.name, parent=None),
        )
 
        return self.context
@@ -338,10 +334,10 @@ class Manager:
        # deferred import for perf
        import scrapbook
 
-        if not self.solid_def.has_output(output_name):
+        if not self.op_def.has_output(output_name):
            raise DagstermillError(
-                f"Op {self.solid_def.name} does not have output named {output_name}.Expected one of"
-                f" {[str(output_def.name) for output_def in self.solid_def.output_defs]}"
+                f"Op {self.op_def.name} does not have output named {output_name}.Expected one of"
+                f" {[str(output_def.name) for output_def in self.op_def.output_defs]}"
            )
 
        # pass output value cross process boundary using io manager
@@ -414,10 +410,8 @@ class Manager:
            check.failed("Expected DagstermillRuntimeExecutionContext")
        step_context = dm_context.step_context  # pylint: disable=protected-access
        step_input = step_context.step.step_input_named(input_name)
-        input_def = step_context.solid_def.input_def_named(input_name)
-        for event_or_input_value in ensure_gen(
-            step_input.source.load_input_object(step_context, input_def)
-        ):
+        input_def = step_context.op_def.input_def_named(input_name)
+        for event_or_input_value in step_input.source.load_input_object(step_context, input_def):
            if isinstance(event_or_input_value, DagsterEvent):
                continue
            else:
dagstermill/translator.py CHANGED
@@ -8,7 +8,7 @@ RESERVED_INPUT_NAMES = [
     "__dm_executable_dict",
     "__dm_json",
     "__dm_pipeline_run_dict",
-    "__dm_solid_handle_kwargs",
+    "__dm_node_handle_kwargs",
     "__dm_instance_ref_dict",
     "__dm_step_key",
     "__dm_input_names",
@@ -35,7 +35,7 @@ class DagsterTranslator(papermill.translators.PythonTranslator):
         assert "__dm_context" in parameters
         assert "__dm_executable_dict" in parameters
         assert "__dm_pipeline_run_dict" in parameters
-        assert "__dm_solid_handle_kwargs" in parameters
+        assert "__dm_node_handle_kwargs" in parameters
         assert "__dm_instance_ref_dict" in parameters
         assert "__dm_step_key" in parameters
         assert "__dm_input_names" in parameters
@@ -44,7 +44,7 @@ class DagsterTranslator(papermill.translators.PythonTranslator):
         pipeline_context_args = dict(
             executable_dict=parameters["__dm_executable_dict"],
             pipeline_run_dict=parameters["__dm_pipeline_run_dict"],
-            solid_handle_kwargs=parameters["__dm_solid_handle_kwargs"],
+            node_handle_kwargs=parameters["__dm_node_handle_kwargs"],
             instance_ref_dict=parameters["__dm_instance_ref_dict"],
             step_key=parameters["__dm_step_key"],
             **context_args,
dagstermill/version.py CHANGED
@@ -1 +1 @@
-__version__ = "0.17.19"
+__version__ = "0.17.21"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dagstermill
-Version: 0.17.19
+Version: 0.17.21
 Summary: run notebooks using the Dagster tools
 Home-page: UNKNOWN
 Author: Elementl
@@ -14,7 +14,7 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
 License-File: LICENSE
-Requires-Dist: dagster (==1.1.19)
+Requires-Dist: dagster (==1.1.21)
 Requires-Dist: ipykernel (!=5.4.0,!=5.4.1,>=4.9.0)
 Requires-Dist: ipython-genutils (>=0.2.0)
 Requires-Dist: packaging (>=20.9)
@@ -1,22 +1,22 @@
-dagstermill/__init__.py,sha256=h-l8uokGSDjw1gG6LGX5bngZXtT7mdm_cOaOgPPXwIo,1045
+dagstermill/__init__.py,sha256=3xNVyJruilLSc6JYrf0W5v5KF_tzUCDQBpYf1nel378,1044
 dagstermill/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30
 dagstermill/asset_factory.py,sha256=4cVAD1DybnMiRi-O6VgRvgP7JYcJnJo-Gs7zmA8JyH8,8833
 dagstermill/cli.py,sha256=AcrZS9NAeZ1OScmjrFBvXNdrTHXhEfjMjLPU9pO442I,4656
 dagstermill/compat.py,sha256=m2Sbq1kr0yfr1WJuKFF5YMAcZTIk_aD5DJivwZjJQvU,558
-dagstermill/context.py,sha256=sLvwY3NYsWRf-OwdLN9a9FLgfdUSaJM_ddTtebLVR_k,8331
+dagstermill/context.py,sha256=lc01TEufiSv9yQFXlXMSuhiR1KvsiMe3xUgLxIV2ZuM,7309
 dagstermill/engine.py,sha256=0VJfMpi8tXmt8oljvOXAzHYP2xpPTbOxxFuvAS_AwLo,5928
 dagstermill/errors.py,sha256=WOmpAGp-J1XhyGK_LT3ZZKsBwF5dvrWbqSaSldtoh6Y,141
-dagstermill/factory.py,sha256=Qi7W-bau5T42pe25pZ-32WBdF82HIUkFyTcpyZCKQ0Q,18480
+dagstermill/factory.py,sha256=tF5bE097lkPINa9OqHEAA92JPiOQULWKM4UVsA2_JKA,18472
 dagstermill/io_managers.py,sha256=OGUjSH--pN8qsTDBgGPLWdeamXwEZOjfNX9rQfNsQEs,3533
-dagstermill/manager.py,sha256=o4wLxOW_be-LfgbedT_wKItXEGrPAvxmBlBVl6_sKWI,17372
+dagstermill/manager.py,sha256=A7wPb_75ayExpsN8HV396Zxce5tyiinSqU7CprsXFMQ,17117
 dagstermill/serialize.py,sha256=eXW3c26CiILT_uebyFcAKBnsiNxnjyGT_ch3PfGyjek,188
-dagstermill/translator.py,sha256=ej2an-UwRBzVCN4aKTQ3-b3FEvNMsUTc4OHIytrIloA,2227
-dagstermill/version.py,sha256=LKtJnPhS5GRuhOh9obHGnDlncpF9oAEamdbIKdiL3Tw,24
+dagstermill/translator.py,sha256=go_5ocu575epGPmGJcHqAqH7N7-aSdtiaMHw5T9CXvs,2223
+dagstermill/version.py,sha256=02pERK8eGbKCmhhaP6R4kszX1AZhworPgk9xCzHc0Ak,24
 dagstermill/examples/__init__.py,sha256=kzan-9zFjxaJ8o9bqUso44gcGiOmJrlq4JYO-yIBQao,55
 dagstermill/examples/repository.py,sha256=LoQSv5TaOYwlz9RAWtfChm6ELQKhMIrvb3owvG-xRyY,14808
-dagstermill-0.17.19.dist-info/LICENSE,sha256=-gtoVIAZYUHYmNHISZg982FI4Oh19mV1nxgTVW8eCB8,11344
-dagstermill-0.17.19.dist-info/METADATA,sha256=PLYLGCkC4YkNPiJUIzsaP17HgF1kRfKrdP6sBch1jiE,1014
-dagstermill-0.17.19.dist-info/WHEEL,sha256=p46_5Uhzqz6AzeSosiOnxK-zmFja1i22CrQCjmYe8ec,92
-dagstermill-0.17.19.dist-info/entry_points.txt,sha256=BAnkw0uZ7RK7YSz8L0Vz_2lO26XSs2sdtIXsL6wjmSk,54
-dagstermill-0.17.19.dist-info/top_level.txt,sha256=YDelJKdA5YIIrjsObdd8U4E9YhuXJLRe9NKfUzud9Uc,12
-dagstermill-0.17.19.dist-info/RECORD,,
+dagstermill-0.17.21.dist-info/LICENSE,sha256=-gtoVIAZYUHYmNHISZg982FI4Oh19mV1nxgTVW8eCB8,11344
+dagstermill-0.17.21.dist-info/METADATA,sha256=PAYnhVGoR85NNtxMv5kSVNXBGqywzp3OM0ZHwnYyoMY,1014
+dagstermill-0.17.21.dist-info/WHEEL,sha256=p46_5Uhzqz6AzeSosiOnxK-zmFja1i22CrQCjmYe8ec,92
+dagstermill-0.17.21.dist-info/entry_points.txt,sha256=BAnkw0uZ7RK7YSz8L0Vz_2lO26XSs2sdtIXsL6wjmSk,54
+dagstermill-0.17.21.dist-info/top_level.txt,sha256=YDelJKdA5YIIrjsObdd8U4E9YhuXJLRe9NKfUzud9Uc,12
+dagstermill-0.17.21.dist-info/RECORD,,