dagster 1.12.10__py3-none-any.whl → 1.12.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dagster/_core/definitions/assets/graph/remote_asset_graph.py +2 -2
- dagster/_core/definitions/inference.py +10 -7
- dagster/_core/definitions/input.py +36 -60
- dagster/_core/definitions/metadata/metadata_set.py +14 -3
- dagster/_grpc/types.py +218 -542
- dagster/version.py +1 -1
- {dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/METADATA +3 -3
- {dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/RECORD +12 -12
- {dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/WHEEL +0 -0
- {dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/entry_points.txt +0 -0
- {dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/licenses/LICENSE +0 -0
- {dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/top_level.txt +0 -0

dagster/_core/definitions/assets/graph/remote_asset_graph.py
CHANGED

@@ -520,7 +520,7 @@ class RemoteAssetGraph(BaseAssetGraph[TRemoteAssetNode], ABC, Generic[TRemoteAss
 
         by_table_name = defaultdict(set)
         for node in self.asset_nodes:
-            normalized_table_name = TableMetadataSet. …
+            normalized_table_name = TableMetadataSet.extract_normalized_table_name(node.metadata)
             if normalized_table_name:
                 by_table_name[normalized_table_name.lower()].add(node.key)
 

@@ -547,7 +547,7 @@ class RemoteAssetGraph(BaseAssetGraph[TRemoteAssetNode], ABC, Generic[TRemoteAss
         from dagster._core.definitions.metadata.metadata_set import TableMetadataSet
 
         input_node = self.get(asset_key)
-        input_table_name = TableMetadataSet. …
+        input_table_name = TableMetadataSet.extract_normalized_table_name(input_node.metadata)
 
         if not input_table_name:
             return set()

dagster/_core/definitions/inference.py
CHANGED

@@ -1,14 +1,16 @@
 from collections.abc import Callable, Mapping, Sequence
 from inspect import Parameter, Signature, isgeneratorfunction, signature
-from typing import Any, …
+from typing import Any, Optional
 
+from dagster_shared.record import record
 from docstring_parser import parse
 
 from dagster._core.decorator_utils import get_type_hints
 from dagster._core.definitions.utils import NoValueSentinel
 
 
-class InferredInputProps(NamedTuple):
+@record
+class InferredInputProps:
     """The information about an input that can be inferred from the function signature."""
 
     name: str

@@ -17,7 +19,8 @@ class InferredInputProps(NamedTuple):
     default_value: Any = NoValueSentinel
 
 
-class InferredOutputProps(NamedTuple):
+@record
+class InferredOutputProps:
     """The information about an input that can be inferred from the function signature."""
 
     annotation: Any

@@ -80,15 +83,15 @@ def _infer_inputs_from_params(
     for param in params:
         if param.default is not Parameter.empty:
             input_def = InferredInputProps(
-                param.name,
-                type_hints.get(param.name, param.annotation),
+                name=param.name,
+                annotation=type_hints.get(param.name, param.annotation),
                 default_value=param.default,
                 description=_descriptions.get(param.name),
             )
         else:
            input_def = InferredInputProps(
-                param.name,
-                type_hints.get(param.name, param.annotation),
+                name=param.name,
+                annotation=type_hints.get(param.name, param.annotation),
                 description=_descriptions.get(param.name),
             )
 
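
Note: the @record decorator imported above comes from dagster_shared.record and is used throughout this release like a keyword-only, immutable dataclass decorator, which is why the InferredInputProps call sites switch from positional to keyword arguments. A minimal standalone sketch, assuming dagster_shared is installed; the ExampleInputProps class is hypothetical and only the decorator usage mirrors this diff:

# Hypothetical example class; only the @record usage pattern mirrors this diff.
from typing import Optional

from dagster_shared.record import record


@record
class ExampleInputProps:
    name: str
    description: Optional[str] = None


# @record classes are keyword-only by default (note @record(kw_only=False) in input.py below),
# so fields are passed by keyword, matching the updated InferredInputProps call sites.
props = ExampleInputProps(name="upstream", description=None)
assert props.name == "upstream"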

dagster/_core/definitions/input.py
CHANGED

@@ -1,12 +1,13 @@
 import inspect
 from collections.abc import Callable, Mapping
-from types import …
-from typing import TYPE_CHECKING, Any, …
+from types import UnionType
+from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union
 
 from dagster_shared.error import DagsterError
+from dagster_shared.record import IHaveNew, record, record_custom
 
 import dagster._check as check
-from dagster._annotations import PublicAttr, …
+from dagster._annotations import PublicAttr, public, superseded
 from dagster._core.definitions.events import AssetKey
 from dagster._core.definitions.inference import InferredInputProps
 from dagster._core.definitions.metadata import (

@@ -309,39 +310,22 @@ def _checked_inferred_type(inferred: InferredInputProps) -> DagsterType:
     return resolved_type
 
 
- …
-            check.str_param(node_name, "node_name"),
-            check.str_param(input_name, "input_name"),
-        )
+@record(kw_only=False)
+class InputPointer:
+    node_name: str
+    input_name: str
 
 
- …
-    def __new__(cls, node_name: str, input_name: str, fan_in_index: int):
-        return super().__new__(
-            cls,
-            check.str_param(node_name, "node_name"),
-            check.str_param(input_name, "input_name"),
-            check.int_param(fan_in_index, "fan_in_index"),
-        )
+@record(kw_only=False)
+class FanInInputPointer:
+    node_name: str
+    input_name: str
+    fan_in_index: int
 
 
-@deprecated_param(
-    param="dagster_type",
-    breaking_version="2.0",
-    additional_warn_text="Any defined `dagster_type` should come from the upstream op `Output`.",
-    # Disabling warning here since we're passing this internally and I'm not sure whether it is
-    # actually used or discarded.
-    emit_runtime_warning=False,
-)
 @public
-class InputMapping(NamedTuple):
+@record
+class InputMapping:
     """Defines an input mapping for a graph.
 
     Args:

@@ -350,8 +334,8 @@ class InputMapping(NamedTuple):
         mapped_node_input_name (str): Name of the input in the node (op/graph) that is being mapped to.
         fan_in_index (Optional[int]): The index in to a fanned input, otherwise None.
         graph_input_description (Optional[str]): A description of the input in the graph being mapped from.
-        dagster_type (Optional[DagsterType]): …
-            being mapped from.
+        dagster_type (Optional[DagsterType]): DEPRECATED, to be removed in 2.0.
+            The dagster type of the graph's input being mapped from.
 
     Examples:
         .. code-block:: python

@@ -384,6 +368,8 @@ class InputMapping(NamedTuple):
     mapped_node_input_name: str
     fan_in_index: Optional[int] = None
     graph_input_description: Optional[str] = None
+
+    # DEPRECATED: Any defined `dagster_type` should come from the upstream op `Output`.
     dagster_type: Optional[DagsterType] = None
 
     @property

@@ -411,26 +397,8 @@ class InputMapping(NamedTuple):
 
 
 @public
-class In(
-    NamedTuple(
-        "_In",
-        [
-            ("dagster_type", PublicAttr[Union[DagsterType, type[NoValueSentinel]]]),
-            ("description", PublicAttr[Optional[str]]),
-            ("default_value", PublicAttr[Any]),
-            ("metadata", PublicAttr[Optional[Mapping[str, Any]]]),
-            (
-                "asset_key",
-                PublicAttr[Optional[Union[AssetKey, Callable[["InputContext"], AssetKey]]]],
-            ),
-            (
-                "asset_partitions",
-                PublicAttr[Optional[Union[set[str], Callable[["InputContext"], set[str]]]]],
-            ),
-            ("input_manager_key", PublicAttr[Optional[str]]),
-        ],
-    )
-):
+@record_custom
+class In(IHaveNew):
     """Defines an argument to an op's compute function.
 
     Inputs may flow from previous op's outputs, or be stubbed using config. They may optionally

@@ -454,6 +422,14 @@ class In(
         upstream output.
     """
 
+    dagster_type: PublicAttr[Union[DagsterType, type[NoValueSentinel]]]
+    description: PublicAttr[Optional[str]]
+    default_value: PublicAttr[Any]
+    metadata: PublicAttr[Mapping[str, Any]]
+    asset_key: PublicAttr[Optional[Union[AssetKey, Callable[["InputContext"], AssetKey]]]]
+    asset_partitions: PublicAttr[Optional[Union[set[str], Callable[["InputContext"], set[str]]]]]
+    input_manager_key: PublicAttr[Optional[str]]
+
     def __new__(
         cls,
         dagster_type: Union[type, UnionType, DagsterType] = NoValueSentinel,

@@ -471,12 +447,12 @@ class In(
                 if dagster_type is NoValueSentinel
                 else resolve_dagster_type(dagster_type)
             ),
-            description=…
+            description=description,
             default_value=default_value,
-            metadata=…
-            asset_key=…
+            metadata={} if metadata is None else metadata,
+            asset_key=asset_key,
             asset_partitions=asset_partitions,
-            input_manager_key=…
+            input_manager_key=input_manager_key,
         )
 
     @staticmethod

@@ -506,15 +482,15 @@ class In(
 
 
 @public
- …
+@record
+class GraphIn:
     """Represents information about an input that a graph maps.
 
     Args:
        description (Optional[str]): Human-readable description of the input.
    """
 
-    …
-        return super().__new__(cls, description=description)
+    description: PublicAttr[Optional[str]] = None
 
     def to_definition(self, name: str) -> InputDefinition:
         return InputDefinition(name=name, description=self.description)
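
Note: where a class still needs to normalize its arguments (as In does with metadata above), this release pairs @record_custom with IHaveNew and keeps a __new__ that forwards keyword arguments to super().__new__. A minimal sketch of that pattern, assuming dagster_shared is installed; ExampleArgs is hypothetical:

# Hypothetical ExampleArgs class; the @record_custom / IHaveNew / __new__ pattern
# mirrors how In and the gRPC argument classes in this diff normalize optional inputs.
from collections.abc import Mapping
from typing import Any, Optional

from dagster_shared.record import IHaveNew, record_custom


@record_custom
class ExampleArgs(IHaveNew):
    name: str
    metadata: Mapping[str, Any]

    def __new__(cls, name: str, metadata: Optional[Mapping[str, Any]] = None):
        # Normalization lives in __new__, replacing the old check.*_param wrappers.
        return super().__new__(
            cls,
            name=name,
            metadata={} if metadata is None else metadata,
        )


args = ExampleArgs(name="demo")
assert args.metadata == {}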

dagster/_core/definitions/metadata/metadata_set.py
CHANGED

@@ -197,9 +197,20 @@ class TableMetadataSet(NamespacedMetadataSet):
     def current_key_by_legacy_key(cls) -> Mapping[str, str]:
         return {"relation_identifier": "table_name"}
 
-    @…
-    def …
-        …
+    @classmethod
+    def extract_normalized_table_name(cls, metadata: Mapping[str, Any]) -> Optional[str]:
+        from pydantic import ValidationError
+
+        metadata_subset = {
+            key: metadata[key]
+            for key in {"dagster/table_name", "dagster/relation_identifier"}
+            if key in metadata
+        }
+        try:
+            table_name = TableMetadataSet.extract(metadata_subset).table_name
+            return table_name.lower() if table_name else None
+        except ValidationError:
+            return None
 
 
 class UriMetadataSet(NamespacedMetadataSet):
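
Note: a usage sketch for the new TableMetadataSet.extract_normalized_table_name classmethod, based only on the implementation shown above (it reads the dagster/table_name or dagster/relation_identifier metadata keys, lowercases the result, and returns None when neither key is set or validation fails):

# Usage sketch for the classmethod added above; key names mirror the implementation.
from dagster._core.definitions.metadata.metadata_set import TableMetadataSet

metadata = {"dagster/table_name": "Analytics.Public.ORDERS"}
assert TableMetadataSet.extract_normalized_table_name(metadata) == "analytics.public.orders"
assert TableMetadataSet.extract_normalized_table_name({}) is None
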
dagster/_grpc/types.py
CHANGED

@@ -1,8 +1,9 @@
 import base64
 import zlib
 from collections.abc import Mapping, Sequence
-from typing import AbstractSet, Any, …
+from typing import AbstractSet, Any, Optional  # noqa: UP035
 
+from dagster_shared.record import IHaveNew, copy, record, record_custom
 from dagster_shared.serdes.objects.models.defs_state_info import DefsStateInfo
 from dagster_shared.serdes.serdes import SetToSequenceFieldSerializer
 

@@ -30,55 +31,18 @@ from dagster._utils.error import SerializableErrorInfo
         "op_selection": "solid_selection",
     }
 )
- …
-            ("asset_check_selection", Optional[AbstractSet[AssetCheckKey]]),
-            ("mode", str),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        job_origin: RemoteJobOrigin,
-        op_selection: Sequence[str],
-        run_config: Mapping[str, object],
-        step_keys_to_execute: Optional[Sequence[str]],
-        job_snapshot_id: str,
-        known_state: Optional[KnownExecutionState] = None,
-        instance_ref: Optional[InstanceRef] = None,
-        asset_selection: Optional[AbstractSet[AssetKey]] = None,
-        asset_check_selection: Optional[AbstractSet[AssetCheckKey]] = None,
-        mode: str = DEFAULT_MODE_NAME,
-    ):
-        return super().__new__(
-            cls,
-            job_origin=check.inst_param(job_origin, "job_origin", RemoteJobOrigin),
-            op_selection=check.opt_sequence_param(op_selection, "op_selection", of_type=str),
-            run_config=check.mapping_param(run_config, "run_config", key_type=str),
-            mode=check.str_param(mode, "mode"),
-            step_keys_to_execute=check.opt_nullable_sequence_param(
-                step_keys_to_execute, "step_keys_to_execute", of_type=str
-            ),
-            job_snapshot_id=check.str_param(job_snapshot_id, "job_snapshot_id"),
-            known_state=check.opt_inst_param(known_state, "known_state", KnownExecutionState),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-            asset_selection=check.opt_nullable_set_param(
-                asset_selection, "asset_selection", of_type=AssetKey
-            ),
-            asset_check_selection=check.opt_nullable_set_param(
-                asset_check_selection, "asset_check_selection", of_type=AssetCheckKey
-            ),
-        )
+@record
+class ExecutionPlanSnapshotArgs:
+    job_origin: RemoteJobOrigin
+    op_selection: Sequence[str]
+    run_config: Mapping[str, object]
+    step_keys_to_execute: Optional[Sequence[str]]
+    job_snapshot_id: str
+    known_state: Optional[KnownExecutionState] = None
+    instance_ref: Optional[InstanceRef] = None
+    asset_selection: Optional[AbstractSet[AssetKey]] = None
+    asset_check_selection: Optional[AbstractSet[AssetCheckKey]] = None
+    mode: str = DEFAULT_MODE_NAME
 
 
 def _get_entry_point(origin: JobPythonOrigin):

@@ -95,18 +59,14 @@ def _get_entry_point(origin: JobPythonOrigin):
         "run_id": "pipeline_run_id",
     }
 )
- …
-            ("set_exit_code_on_failure", Optional[bool]),
-        ],
-    )
-):
+@record_custom
+class ExecuteRunArgs(IHaveNew):
+    # Deprecated, only needed for back-compat since it can be pulled from the PipelineRun
+    job_origin: JobPythonOrigin
+    run_id: str
+    instance_ref: Optional[InstanceRef]
+    set_exit_code_on_failure: Optional[bool]
+
     def __new__(
         cls,
         job_origin: JobPythonOrigin,

@@ -116,19 +76,11 @@ class ExecuteRunArgs(
     ):
         return super().__new__(
             cls,
-            job_origin=…
-            …
-            run_id=check.str_param(run_id, "run_id"),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-            set_exit_code_on_failure=(
-                True
-                if check.opt_bool_param(set_exit_code_on_failure, "set_exit_code_on_failure")
-                is True
-                else None
-            ),  # for back-compat
+            job_origin=job_origin,
+            run_id=run_id,
+            instance_ref=instance_ref,
+            # for back-compat: only True or None allowed
+            set_exit_code_on_failure=True if set_exit_code_on_failure is True else None,
         )
 
     def get_command_args(self) -> Sequence[str]:

@@ -146,18 +98,14 @@ class ExecuteRunArgs(
         "run_id": "pipeline_run_id",
     }
 )
- …
-            ("set_exit_code_on_failure", Optional[bool]),
-        ],
-    )
-):
+@record_custom
+class ResumeRunArgs(IHaveNew):
+    # Deprecated, only needed for back-compat since it can be pulled from the DagsterRun
+    job_origin: JobPythonOrigin
+    run_id: str
+    instance_ref: Optional[InstanceRef]
+    set_exit_code_on_failure: Optional[bool]
+
     def __new__(
         cls,
         job_origin: JobPythonOrigin,

@@ -167,19 +115,11 @@ class ResumeRunArgs(
     ):
         return super().__new__(
             cls,
-            job_origin=…
-            …
-            run_id=check.str_param(run_id, "run_id"),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-            set_exit_code_on_failure=(
-                True
-                if check.opt_bool_param(set_exit_code_on_failure, "set_exit_code_on_failure")
-                is True
-                else None
-            ),  # for back-compat
+            job_origin=job_origin,
+            run_id=run_id,
+            instance_ref=instance_ref,
+            # for back-compat: only True or None allowed
+            set_exit_code_on_failure=True if set_exit_code_on_failure is True else None,
         )
 
     def get_command_args(self) -> Sequence[str]:

@@ -198,32 +138,11 @@ class ResumeRunArgs(
         "run_id": "pipeline_run_id",
     },
 )
- …
-            ("run_id", str),
-            ("instance_ref", Optional[InstanceRef]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        job_origin: RemoteJobOrigin,
-        run_id: str,
-        instance_ref: Optional[InstanceRef],
-    ):
-        return super().__new__(
-            cls,
-            job_origin=check.inst_param(
-                job_origin,
-                "job_origin",
-                RemoteJobOrigin,
-            ),
-            run_id=check.str_param(run_id, "run_id"),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-        )
+@record
+class ExecuteExternalJobArgs:
+    job_origin: RemoteJobOrigin
+    run_id: str
+    instance_ref: Optional[InstanceRef]
 
 
 @whitelist_for_serdes(

@@ -232,22 +151,18 @@ class ExecuteExternalJobArgs(
         "run_id": "pipeline_run_id",
     }
 )
- …
-            ("print_serialized_events", bool),
-        ],
-    )
-):
+@record_custom
+class ExecuteStepArgs(IHaveNew):
+    # Deprecated, only needed for back-compat since it can be pulled from the DagsterRun
+    job_origin: JobPythonOrigin
+    run_id: str
+    step_keys_to_execute: Optional[Sequence[str]]
+    instance_ref: Optional[InstanceRef]
+    retry_mode: Optional[RetryMode]
+    known_state: Optional[KnownExecutionState]
+    should_verify_step: Optional[bool]
+    print_serialized_events: bool
+
     def __new__(
         cls,
         job_origin: JobPythonOrigin,

@@ -261,19 +176,15 @@ class ExecuteStepArgs(
     ):
         return super().__new__(
             cls,
-            job_origin=…
-            run_id=…
-            step_keys_to_execute=…
-            …
-                should_verify_step, "should_verify_step", False
-            ),
-            print_serialized_events=check.opt_bool_param(
-                print_serialized_events, "print_serialized_events", False
+            job_origin=job_origin,
+            run_id=run_id,
+            step_keys_to_execute=step_keys_to_execute,
+            instance_ref=instance_ref,
+            retry_mode=retry_mode,
+            known_state=known_state,
+            should_verify_step=should_verify_step if should_verify_step is not None else False,
+            print_serialized_events=(
+                print_serialized_events if print_serialized_events is not None else False
             ),
         )
 

@@ -306,33 +217,24 @@ class ExecuteStepArgs(
 
 
 @whitelist_for_serdes
- …
-        return super().__new__(
-            cls,
-            repository_name=check.str_param(repository_name, "repository_name"),
-            attribute=check.str_param(attribute, "attribute"),
-        )
+@record
+class LoadableRepositorySymbol:
+    repository_name: str
+    attribute: str
 
 
 @whitelist_for_serdes
- …
-            ("defs_state_info", Optional[DefsStateInfo]),
-        ],
-    )
-):
+@record_custom
+class ListRepositoriesResponse(IHaveNew):
+    repository_symbols: Sequence[LoadableRepositorySymbol]
+    executable_path: Optional[str]
+    repository_code_pointer_dict: Mapping[str, CodePointer]
+    entry_point: Optional[Sequence[str]]
+    container_image: Optional[str]
+    container_context: Optional[Mapping[str, Any]]
+    dagster_library_versions: Optional[Mapping[str, str]]
+    defs_state_info: Optional[DefsStateInfo]
+
     def __new__(
         cls,
         repository_symbols: Sequence[LoadableRepositorySymbol],

@@ -340,52 +242,33 @@ class ListRepositoriesResponse(
         repository_code_pointer_dict: Optional[Mapping[str, CodePointer]] = None,
         entry_point: Optional[Sequence[str]] = None,
         container_image: Optional[str] = None,
-        container_context: Optional[Mapping] = None,
+        container_context: Optional[Mapping[str, Any]] = None,
         dagster_library_versions: Optional[Mapping[str, str]] = None,
         defs_state_info: Optional[DefsStateInfo] = None,
     ):
         return super().__new__(
             cls,
-            repository_symbols=…
-            …
-            entry_point=(
-                check.sequence_param(entry_point, "entry_point", of_type=str)
-                if entry_point is not None
-                else None
-            ),
-            container_image=check.opt_str_param(container_image, "container_image"),
-            container_context=(
-                check.dict_param(container_context, "container_context")
-                if container_context is not None
-                else None
-            ),
-            dagster_library_versions=check.opt_nullable_mapping_param(
-                dagster_library_versions, "dagster_library_versions"
-            ),
-            defs_state_info=check.opt_inst_param(defs_state_info, "defs_state_info", DefsStateInfo),
+            repository_symbols=repository_symbols,
+            executable_path=executable_path,
+            repository_code_pointer_dict=(
+                repository_code_pointer_dict if repository_code_pointer_dict is not None else {}
+            ),
+            entry_point=entry_point,
+            container_image=container_image,
+            container_context=container_context,
+            dagster_library_versions=dagster_library_versions,
+            defs_state_info=defs_state_info,
         )
 
 
 @whitelist_for_serdes
- …
-            ("attribute", Optional[str]),
-        ],
-    )
-):
+@record_custom
+class ListRepositoriesInput(IHaveNew):
+    module_name: Optional[str]
+    python_file: Optional[str]
+    working_directory: Optional[str]
+    attribute: Optional[str]
+
     def __new__(
         cls,
         module_name: Optional[str],

@@ -397,47 +280,22 @@ class ListRepositoriesInput(
         check.invariant(module_name or python_file, "Must set at least one")
         return super().__new__(
             cls,
-            module_name=…
-            python_file=…
-            working_directory=…
-            attribute=…
+            module_name=module_name,
+            python_file=python_file,
+            working_directory=working_directory,
+            attribute=attribute,
         )
 
 
 @whitelist_for_serdes
- …
-            ("job_name", Optional[str]),
-            ("instance_ref", Optional[InstanceRef]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        repository_origin: RemoteRepositoryOrigin,
-        partition_set_name: str,
-        partition_name: str,
-        job_name: Optional[str] = None,
-        instance_ref: Optional[InstanceRef] = None,
-    ):
-        return super().__new__(
-            cls,
-            repository_origin=check.inst_param(
-                repository_origin,
-                "repository_origin",
-                RemoteRepositoryOrigin,
-            ),
-            partition_set_name=check.str_param(partition_set_name, "partition_set_name"),
-            job_name=check.opt_str_param(job_name, "job_name"),
-            partition_name=check.str_param(partition_name, "partition_name"),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-        )
+@record
+class PartitionArgs:
+    repository_origin: RemoteRepositoryOrigin
+    # This is here for backcompat. it's expected to always be f"{job_name}_partition_set".
+    partition_set_name: str
+    partition_name: str
+    job_name: Optional[str] = None
+    instance_ref: Optional[InstanceRef] = None
 
     def get_job_name(self) -> str:
         if self.job_name:

@@ -447,35 +305,16 @@ class PartitionArgs(
 
 
 @whitelist_for_serdes
- …
-            # own is enough to specify which PartitionsDefinition to use.
-            ("job_name", Optional[str]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        repository_origin: RemoteRepositoryOrigin,
-        partition_set_name: str,
-        job_name: Optional[str] = None,
-    ):
-        return super().__new__(
-            cls,
-            repository_origin=check.inst_param(
-                repository_origin, "repository_origin", RemoteRepositoryOrigin
-            ),
-            job_name=check.opt_str_param(job_name, "job_name"),
-            partition_set_name=check.str_param(partition_set_name, "partition_set_name"),
-        )
+@record
+class PartitionNamesArgs:
+    repository_origin: RemoteRepositoryOrigin
+    # This is here for backcompat. it's expected to always be f"{job_name}_partition_set".
+    partition_set_name: str
+    # This is introduced in the same release that we're making it possible for an asset job
+    # to target assets with different PartitionsDefinitions. Prior user code versions can
+    # (and do) safely ignore this parameter, because, in those versions, the job name on its
+    # own is enough to specify which PartitionsDefinition to use.
+    job_name: Optional[str] = None
 
     def get_job_name(self) -> str:
         if self.job_name:

@@ -485,33 +324,12 @@ class PartitionNamesArgs(
 
 
 @whitelist_for_serdes
- …
-            ("partition_names", Sequence[str]),
-            ("instance_ref", Optional[InstanceRef]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        repository_origin: RemoteRepositoryOrigin,
-        partition_set_name: str,
-        partition_names: Sequence[str],
-        instance_ref: Optional[InstanceRef] = None,
-    ):
-        return super().__new__(
-            cls,
-            repository_origin=check.inst_param(
-                repository_origin, "repository_origin", RemoteRepositoryOrigin
-            ),
-            partition_set_name=check.str_param(partition_set_name, "partition_set_name"),
-            partition_names=check.sequence_param(partition_names, "partition_names", of_type=str),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-        )
+@record
+class PartitionSetExecutionParamArgs:
+    repository_origin: RemoteRepositoryOrigin
+    partition_set_name: str
+    partition_names: Sequence[str]
+    instance_ref: Optional[InstanceRef] = None
 
 
 @whitelist_for_serdes(

@@ -523,18 +341,14 @@ class PartitionSetExecutionParamArgs(
     # asset_selection previously was erroneously represented as a sequence
     field_serializers={"asset_selection": SetToSequenceFieldSerializer},
 )
- …
-            ("include_parent_snapshot", bool),
-        ],
-    )
-):
+@record_custom
+class JobSubsetSnapshotArgs(IHaveNew):
+    job_origin: RemoteJobOrigin
+    op_selection: Optional[Sequence[str]]
+    asset_selection: Optional[AbstractSet[AssetKey]]
+    asset_check_selection: Optional[AbstractSet[AssetCheckKey]]
+    include_parent_snapshot: bool
+
     def __new__(
         cls,
         job_origin: RemoteJobOrigin,

@@ -545,14 +359,10 @@ class JobSubsetSnapshotArgs(
     ):
         return super().__new__(
             cls,
-            job_origin=…
-            op_selection=…
-            …
-            asset_selection=check.opt_nullable_set_param(asset_selection, "asset_selection"),
-            asset_check_selection=check.opt_nullable_set_param(
-                asset_check_selection, "asset_check_selection"
-            ),
+            job_origin=job_origin,
+            op_selection=op_selection,
+            asset_selection=asset_selection,
+            asset_check_selection=asset_check_selection,
             include_parent_snapshot=(
                 include_parent_snapshot if include_parent_snapshot is not None else True
             ),

@@ -561,37 +371,23 @@ class JobSubsetSnapshotArgs(
 
 # Different storage field name for backcompat
 @whitelist_for_serdes(storage_field_names={"code_location_origin": "repository_location_origin"})
- …
-    )
-):
-    def __new__(cls, code_location_origin: CodeLocationOrigin, notebook_path: str):
-        return super().__new__(
-            cls,
-            code_location_origin=check.inst_param(
-                code_location_origin, "code_location_origin", CodeLocationOrigin
-            ),
-            notebook_path=check.str_param(notebook_path, "notebook_path"),
-        )
+@record
+class NotebookPathArgs:
+    code_location_origin: CodeLocationOrigin
+    notebook_path: str
 
 
 @whitelist_for_serdes
- …
-            ("timeout", Optional[int]),
-        ],
-    )
-):
+@record_custom
+class ExternalScheduleExecutionArgs(IHaveNew):
+    repository_origin: RemoteRepositoryOrigin
+    instance_ref: Optional[InstanceRef]
+    schedule_name: str
+    scheduled_execution_timestamp: Optional[float]
+    scheduled_execution_timezone: Optional[str]
+    log_key: Sequence[str]
+    timeout: Optional[int]
+
     def __new__(
         cls,
         repository_origin: RemoteRepositoryOrigin,

@@ -604,42 +400,31 @@ class ExternalScheduleExecutionArgs(
     ):
         return super().__new__(
             cls,
-            repository_origin=…
-            …
-            ),
-            scheduled_execution_timezone=check.opt_str_param(
-                scheduled_execution_timezone,
-                "scheduled_execution_timezone",
-            ),
-            log_key=check.opt_list_param(log_key, "log_key", of_type=str),
-            timeout=check.opt_int_param(timeout, "timeout"),
+            repository_origin=repository_origin,
+            instance_ref=instance_ref,
+            schedule_name=schedule_name,
+            scheduled_execution_timestamp=scheduled_execution_timestamp,
+            scheduled_execution_timezone=scheduled_execution_timezone,
+            log_key=log_key if log_key is not None else [],
+            timeout=timeout,
         )
 
 
 @whitelist_for_serdes
- …
-            ("last_completion_time", Optional[float]),
-        ],
-    )
-):
+@record_custom
+class SensorExecutionArgs(IHaveNew):
+    repository_origin: RemoteRepositoryOrigin
+    instance_ref: Optional[InstanceRef]
+    sensor_name: str
+    last_tick_completion_time: Optional[float]
+    last_run_key: Optional[str]
+    cursor: Optional[str]
+    log_key: Sequence[str]
+    timeout: Optional[int]
+    last_sensor_start_time: Optional[float]
+    # deprecated
+    last_completion_time: Optional[float]
+
     def __new__(
         cls,
         repository_origin: RemoteRepositoryOrigin,

@@ -663,192 +448,83 @@ class SensorExecutionArgs(
         )
         return super().__new__(
             cls,
-            repository_origin=…
-            …
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-            sensor_name=check.str_param(sensor_name, "sensor_name"),
+            repository_origin=repository_origin,
+            instance_ref=instance_ref,
+            sensor_name=sensor_name,
             last_tick_completion_time=normalized_last_tick_completion_time,
-            last_run_key=…
-            cursor=…
-            log_key=…
+            last_run_key=last_run_key,
+            cursor=cursor,
+            log_key=log_key if log_key is not None else [],
             timeout=timeout,
-            last_sensor_start_time=…
-                last_sensor_start_time, "last_sensor_start_time"
-            ),
+            last_sensor_start_time=last_sensor_start_time,
             last_completion_time=normalized_last_tick_completion_time,
         )
 
     def with_default_timeout(self, timeout: int) -> "SensorExecutionArgs":
         """If the timeout is not explicitly set, provides a default timeout which is used for the sensor execution."""
         if self.timeout is None:
-            return self
+            return copy(self, timeout=timeout)
         return self
 
 
 @whitelist_for_serdes
- …
-            ("instance_ref", InstanceRef),
-            ("name", str),
-        ],
-    )
-):
-    def __new__(
-        cls, repository_origin: RemoteRepositoryOrigin, instance_ref: InstanceRef, name: str
-    ):
-        return super().__new__(
-            cls,
-            repository_origin=check.inst_param(
-                repository_origin, "repository_origin", RemoteRepositoryOrigin
-            ),
-            instance_ref=check.inst_param(instance_ref, "instance_ref", InstanceRef),
-            name=check.str_param(name, "name"),
-        )
+@record
+class ExternalJobArgs:
+    repository_origin: RemoteRepositoryOrigin
+    instance_ref: InstanceRef
+    name: str
 
 
 @whitelist_for_serdes
- …
-    )
-):
-    def __new__(cls, success: bool, serializable_error_info: Optional[SerializableErrorInfo]):
-        return super().__new__(
-            cls,
-            success=check.bool_param(success, "success"),
-            serializable_error_info=check.opt_inst_param(
-                serializable_error_info, "serializable_error_info", SerializableErrorInfo
-            ),
-        )
+@record
+class ShutdownServerResult:
+    success: bool
+    serializable_error_info: Optional[SerializableErrorInfo]
 
 
 @whitelist_for_serdes
- …
-            cls,
-            run_id=check.str_param(run_id, "run_id"),
-        )
+@record
+class CancelExecutionRequest:
+    run_id: str
 
 
 @whitelist_for_serdes
- …
-            ("message", Optional[str]),
-            ("serializable_error_info", Optional[SerializableErrorInfo]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        success: bool,
-        message: Optional[str],
-        serializable_error_info: Optional[SerializableErrorInfo],
-    ):
-        return super().__new__(
-            cls,
-            success=check.bool_param(success, "success"),
-            message=check.opt_str_param(message, "message"),
-            serializable_error_info=check.opt_inst_param(
-                serializable_error_info, "serializable_error_info", SerializableErrorInfo
-            ),
-        )
+@record
+class CancelExecutionResult:
+    success: bool
+    message: Optional[str]
+    serializable_error_info: Optional[SerializableErrorInfo]
 
 
 @whitelist_for_serdes
- …
-            cls,
-            run_id=check.str_param(run_id, "run_id"),
-        )
+@record
+class CanCancelExecutionRequest:
+    run_id: str
 
 
 @whitelist_for_serdes
- …
-            cls,
-            can_cancel=check.bool_param(can_cancel, "can_cancel"),
-        )
+@record
+class CanCancelExecutionResult:
+    can_cancel: bool
 
 
 @whitelist_for_serdes
- …
-            ("message", Optional[str]),
-            ("serializable_error_info", Optional[SerializableErrorInfo]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        success: bool,
-        message: Optional[str],
-        serializable_error_info: Optional[SerializableErrorInfo],
-    ):
-        return super().__new__(
-            cls,
-            success=check.bool_param(success, "success"),
-            message=check.opt_str_param(message, "message"),
-            serializable_error_info=check.opt_inst_param(
-                serializable_error_info, "serializable_error_info", SerializableErrorInfo
-            ),
-        )
+@record
+class StartRunResult:
+    success: bool
+    message: Optional[str]
+    serializable_error_info: Optional[SerializableErrorInfo]
 
 
 @whitelist_for_serdes
- …
-            ("current_image", Optional[str]),
-            ("serializable_error_info", Optional[SerializableErrorInfo]),
-        ],
-    )
-):
-    def __new__(
-        cls, current_image: Optional[str], serializable_error_info: Optional[SerializableErrorInfo]
-    ):
-        return super().__new__(
-            cls,
-            current_image=check.opt_str_param(current_image, "current_image"),
-            serializable_error_info=check.opt_inst_param(
-                serializable_error_info, "serializable_error_info", SerializableErrorInfo
-            ),
-        )
+@record
+class GetCurrentImageResult:
+    current_image: Optional[str]
+    serializable_error_info: Optional[SerializableErrorInfo]
 
 
 @whitelist_for_serdes
- …
-            ("current_runs", Sequence[str]),
-            ("serializable_error_info", Optional[SerializableErrorInfo]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        current_runs: Sequence[str],
-        serializable_error_info: Optional[SerializableErrorInfo],
-    ):
-        return super().__new__(
-            cls,
-            current_runs=check.list_param(current_runs, "current_runs", of_type=str),
-            serializable_error_info=check.opt_inst_param(
-                serializable_error_info, "serializable_error_info", SerializableErrorInfo
-            ),
-        )
+@record
+class GetCurrentRunsResult:
+    current_runs: Sequence[str]
+    serializable_error_info: Optional[SerializableErrorInfo]
dagster/version.py
CHANGED

@@ -1 +1 @@
-__version__ = "1.12.10"
+__version__ = "1.12.11"

{dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dagster
-Version: 1.12.10
+Version: 1.12.11
 Summary: Dagster is an orchestration platform for the development, production, and observation of data assets.
 Author: Dagster Labs
 Author-email: hello@dagsterlabs.com

@@ -60,8 +60,8 @@ Requires-Dist: universal_pathlib; python_version < "3.12"
 Requires-Dist: universal_pathlib>=0.2.0; python_version >= "3.12"
 Requires-Dist: rich
 Requires-Dist: filelock
-Requires-Dist: dagster-pipes==1.12.10
-Requires-Dist: dagster-shared==1.12.10
+Requires-Dist: dagster-pipes==1.12.11
+Requires-Dist: dagster-shared==1.12.11
 Requires-Dist: antlr4-python3-runtime
 Provides-Extra: docker
 Requires-Dist: docker; extra == "docker"

{dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/RECORD
CHANGED

@@ -4,7 +4,7 @@ dagster/_annotations.py,sha256=GC7Rc8ZJZS9EpUuiCMyrtLZ5lsGGjPPkVtlmaClkt2o,1610
 dagster/_builtins.py,sha256=J6A1CE28JV0itz73hCaHIKoUknb1j5B3QO5Frx_hQuU,471
 dagster/_module_alias_map.py,sha256=KsLPXRga52UbPcEjFpOie8tvb7sGdNnikl3MUelYtVA,3349
 dagster/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
-dagster/version.py,sha256=…
+dagster/version.py,sha256=V9Hrmon3VMx6Fm-BKFFV3DZcAfdU2IO-xs1MB4F4LlI,24
 dagster/_api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dagster/_api/get_server_id.py,sha256=sBjhjcHgB__iIN567QCJmTtBK6-v31VfjDsYNZIohVw,731
 dagster/_api/list_repositories.py,sha256=fbjMobgFKzwoMN825GEgDeCx1jzUlW2vPhPDuYIg93g,3905

@@ -117,8 +117,8 @@ dagster/_core/definitions/freshness_policy.py,sha256=cZwymSwKoLP0BpLQtASZvmD6HPA
 dagster/_core/definitions/graph_definition.py,sha256=E-t2_gb8DnNLBzK5urdpvhg0o0Iv-wXAycQ9fWc4ANE,52914
 dagster/_core/definitions/hook_definition.py,sha256=yVMCMHg1UTiGswM0LlvWWkKiDx-cfLON5SDKK13_p4s,6709
 dagster/_core/definitions/hook_invocation.py,sha256=QgG12U_H7krChdSl_slv6HY_H6L_NLbPvpiKuUj_ikU,1640
-dagster/_core/definitions/inference.py,sha256=…
-dagster/_core/definitions/input.py,sha256=…
+dagster/_core/definitions/inference.py,sha256=NbpIuSwziHTx2g5x_fEWmsNQY_lA69Ap-66x1QXc5iY,3375
+dagster/_core/definitions/input.py,sha256=lJIsJMNp8FNVKCHSKCwe6KmvNxYD6CbbjQ1no_GbJiQ,19723
 dagster/_core/definitions/instigation_logger.py,sha256=n0l2a7uWo6qlDB__zzONRNW-U6bnCuZeHxsV_kuIbX8,7396
 dagster/_core/definitions/job_base.py,sha256=o5FBiqml5le14TAiDPxmYRLSoW4-_G0N2HzeXvakME8,4293
 dagster/_core/definitions/job_definition.py,sha256=Hmon_NOwLvRCKSqPkaqr0AVjjhcQBOCRUqHma0ZPxNQ,62303

@@ -196,7 +196,7 @@ dagster/_core/definitions/assets/graph/asset_graph.py,sha256=7uC3IgCqjvVLlJ86G9K
 dagster/_core/definitions/assets/graph/asset_graph_differ.py,sha256=ZgyJa3tD-58a5y8SbU10jn60FHQECGA3-FrspkMPyyY,8338
 dagster/_core/definitions/assets/graph/asset_graph_subset.py,sha256=T44B8C2_J1UdnjCv-wROZHbAqACbGag06QM6dvpVgoI,17374
 dagster/_core/definitions/assets/graph/base_asset_graph.py,sha256=kCFBREioJyltAXCwIqbQYUC7QL9VZHn1zgwB54Gho4I,32842
-dagster/_core/definitions/assets/graph/remote_asset_graph.py,sha256=…
+dagster/_core/definitions/assets/graph/remote_asset_graph.py,sha256=8ibrcDnJHM16dr4jORVMoZL8qX5etw70q5Uqn1VmI8s,35164
 dagster/_core/definitions/assets/job/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dagster/_core/definitions/assets/job/asset_in.py,sha256=c70Sx5_lH8C4aSRmOtL6mn2Jk18BFhpds-VWUcbMQBw,4065
 dagster/_core/definitions/assets/job/asset_job.py,sha256=FgJzVfsfDYUU8bHvNcTooGn3ROOnolrnjcsch8Wuk3o,30964

@@ -241,7 +241,7 @@ dagster/_core/definitions/decorators/sensor_decorator.py,sha256=BaieF1GcKdaONu3O
 dagster/_core/definitions/decorators/source_asset_decorator.py,sha256=KE9oCVlzaRNiUck_aP8n5vX7Ah8JQEOoXirwRW2E5YQ,14912
 dagster/_core/definitions/metadata/__init__.py,sha256=3udptTJSppffelN6ehwrArbfpS-LFwHa9MqYArTJqiA,10784
 dagster/_core/definitions/metadata/external_metadata.py,sha256=DphSm10HGxHACurNFhegFcAM8lOx67QePUsOyek6Yac,5040
-dagster/_core/definitions/metadata/metadata_set.py,sha256=…
+dagster/_core/definitions/metadata/metadata_set.py,sha256=28HSSrJrfmh2Oj7akTuGyjTBMXddQe6L4xO7pdaucIg,8204
 dagster/_core/definitions/metadata/metadata_value.py,sha256=vQdvYxfqWUWk7xe3sdNWmErTcyOx3iSJ7OSwuv-MJk8,33394
 dagster/_core/definitions/metadata/source_code.py,sha256=sc21_k_eH7ZaUeTQMWF3XYrBtCa3-yGSy9_gTiE149s,16350
 dagster/_core/definitions/metadata/table.py,sha256=PwXMIyKMn-eotPThhX6OPkyQqrealjs5KExo8WkTWU8,11097

@@ -670,7 +670,7 @@ dagster/_grpc/impl.py,sha256=KCQc0n1ySN4sB5PssKUg9jPXdOEMdAInXpGOEORuo_A,21868
 dagster/_grpc/proxy_server.py,sha256=2zpPc4i1c5yRL1VTZTYxuc1_SWv5VxyMTZI47uHkEVU,16166
 dagster/_grpc/server.py,sha256=WOy9Wq94d8f1Y8-7qFZmRCH5CT8k9kK_BpbTWRwKu7A,71129
 dagster/_grpc/server_watcher.py,sha256=Po38DqropGefgEztDsiRBR8iHCNoLxMlSJYcWf9t-HU,5793
-dagster/_grpc/types.py,sha256=…
+dagster/_grpc/types.py,sha256=7U8E1N30BZRg3OTKAXC7nqoHDt8ndf1AAdBs017rNNs,17421
 dagster/_grpc/utils.py,sha256=1x9U8V7xdetXb48oKk04wggH52DVdoVgTWt41kBPO7Q,4438
 dagster/_grpc/__generated__/__init__.py,sha256=rnEgpZcRbLz3VvKFrCQgwdnZc5S9MV1fUhxvwrxEroU,190
 dagster/_grpc/__generated__/dagster_api_pb2.py,sha256=pYXoY-0mt7J548Pd1BxZrVkhshJ7lY3bbMXZr3WKAKc,13495

@@ -824,9 +824,9 @@ dagster/components/utils/translation.py,sha256=gKal6ZdMbYImiBLZVa9E2pz4690j8Hukf
 dagster/deprecated/__init__.py,sha256=fkuCwd_79EmS-Voox0YlWEHWxZwQ0ZM_V0viwxw5isw,127
 dagster/preview/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dagster/preview/freshness/__init__.py,sha256=zq0UU-3mnxycgJDtAZ9DFWiBh4eXxKCaKbeuRmUl3_Y,276
-dagster-1.12.10.dist-info/…
-dagster-1.12.10.dist-info/…
-dagster-1.12.10.dist-info/…
-dagster-1.12.10.dist-info/…
-dagster-1.12.10.dist-info/…
-dagster-1.12.10.dist-info/…
+dagster-1.12.11.dist-info/licenses/LICENSE,sha256=4lsMW-RCvfVD4_F57wrmpe3vX1xwUk_OAKKmV_XT7Z0,11348
+dagster-1.12.11.dist-info/METADATA,sha256=TiJ3mrw3erh1Ys5sxTcbYqH-trU8MRo9nChnNe48IXY,12218
+dagster-1.12.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dagster-1.12.11.dist-info/entry_points.txt,sha256=D4W0jf1lM8zq82j3DJd9JkZEmHdFz5gkz8ddRzOEzpc,139
+dagster-1.12.11.dist-info/top_level.txt,sha256=Gx3NqlMQh6AsfIZaJJXEfep5yh-e9pUxkzOlUV3s6CM,8
+dagster-1.12.11.dist-info/RECORD,,

{dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/WHEEL
File without changes

{dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/entry_points.txt
File without changes

{dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/licenses/LICENSE
File without changes

{dagster-1.12.10.dist-info → dagster-1.12.11.dist-info}/top_level.txt
File without changes