dagster 1.12.10__py3-none-any.whl → 1.12.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dagster/_cli/asset.py +15 -4
- dagster/_cli/job.py +8 -3
- dagster/_core/asset_graph_view/serializable_entity_subset.py +9 -0
- dagster/_core/code_pointer.py +8 -1
- dagster/_core/definitions/assets/graph/remote_asset_graph.py +2 -2
- dagster/_core/definitions/declarative_automation/serialized_objects.py +36 -0
- dagster/_core/definitions/decorators/asset_decorator.py +13 -13
- dagster/_core/definitions/inference.py +10 -7
- dagster/_core/definitions/input.py +36 -60
- dagster/_core/definitions/metadata/metadata_set.py +14 -3
- dagster/_core/execution/run_cancellation_thread.py +1 -0
- dagster/_core/telemetry.py +3 -0
- dagster/_generate/download.py +1 -0
- dagster/_grpc/types.py +218 -542
- dagster/components/list/list.py +4 -1
- dagster/version.py +1 -1
- {dagster-1.12.10.dist-info → dagster-1.12.12.dist-info}/METADATA +4 -4
- {dagster-1.12.10.dist-info → dagster-1.12.12.dist-info}/RECORD +22 -22
- {dagster-1.12.10.dist-info → dagster-1.12.12.dist-info}/WHEEL +1 -1
- {dagster-1.12.10.dist-info → dagster-1.12.12.dist-info}/entry_points.txt +0 -0
- {dagster-1.12.10.dist-info → dagster-1.12.12.dist-info}/licenses/LICENSE +0 -0
- {dagster-1.12.10.dist-info → dagster-1.12.12.dist-info}/top_level.txt +0 -0
dagster/_grpc/types.py
CHANGED
@@ -1,8 +1,9 @@
 import base64
 import zlib
 from collections.abc import Mapping, Sequence
-from typing import AbstractSet, Any,
+from typing import AbstractSet, Any, Optional  # noqa: UP035

+from dagster_shared.record import IHaveNew, copy, record, record_custom
 from dagster_shared.serdes.objects.models.defs_state_info import DefsStateInfo
 from dagster_shared.serdes.serdes import SetToSequenceFieldSerializer

@@ -30,55 +31,18 @@ from dagster._utils.error import SerializableErrorInfo
         "op_selection": "solid_selection",
     }
 )
-
-
-
-
-
-
-
-
-
-
-
-
-            ("asset_check_selection", Optional[AbstractSet[AssetCheckKey]]),
-            ("mode", str),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        job_origin: RemoteJobOrigin,
-        op_selection: Sequence[str],
-        run_config: Mapping[str, object],
-        step_keys_to_execute: Optional[Sequence[str]],
-        job_snapshot_id: str,
-        known_state: Optional[KnownExecutionState] = None,
-        instance_ref: Optional[InstanceRef] = None,
-        asset_selection: Optional[AbstractSet[AssetKey]] = None,
-        asset_check_selection: Optional[AbstractSet[AssetCheckKey]] = None,
-        mode: str = DEFAULT_MODE_NAME,
-    ):
-        return super().__new__(
-            cls,
-            job_origin=check.inst_param(job_origin, "job_origin", RemoteJobOrigin),
-            op_selection=check.opt_sequence_param(op_selection, "op_selection", of_type=str),
-            run_config=check.mapping_param(run_config, "run_config", key_type=str),
-            mode=check.str_param(mode, "mode"),
-            step_keys_to_execute=check.opt_nullable_sequence_param(
-                step_keys_to_execute, "step_keys_to_execute", of_type=str
-            ),
-            job_snapshot_id=check.str_param(job_snapshot_id, "job_snapshot_id"),
-            known_state=check.opt_inst_param(known_state, "known_state", KnownExecutionState),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-            asset_selection=check.opt_nullable_set_param(
-                asset_selection, "asset_selection", of_type=AssetKey
-            ),
-            asset_check_selection=check.opt_nullable_set_param(
-                asset_check_selection, "asset_check_selection", of_type=AssetCheckKey
-            ),
-        )
+@record
+class ExecutionPlanSnapshotArgs:
+    job_origin: RemoteJobOrigin
+    op_selection: Sequence[str]
+    run_config: Mapping[str, object]
+    step_keys_to_execute: Optional[Sequence[str]]
+    job_snapshot_id: str
+    known_state: Optional[KnownExecutionState] = None
+    instance_ref: Optional[InstanceRef] = None
+    asset_selection: Optional[AbstractSet[AssetKey]] = None
+    asset_check_selection: Optional[AbstractSet[AssetCheckKey]] = None
+    mode: str = DEFAULT_MODE_NAME


 def _get_entry_point(origin: JobPythonOrigin):
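The hunk above is representative of most of this file's change: a check-validated NamedTuple subclass is replaced by a declarative @record class from dagster_shared.record, with one annotated field per constructor argument. A minimal sketch of that pattern, assuming @record provides keyword construction with runtime type checks as the diff implies; the class and field names below are illustrative, not part of the package:

from typing import Optional

from dagster_shared.record import record


@record
class ExampleArgs:
    # each annotated field becomes a keyword constructor argument that the
    # record machinery type-checks, replacing the old per-field check.* calls
    name: str
    timeout: Optional[int] = None


args = ExampleArgs(name="demo")  # timeout falls back to its default of None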
@@ -95,18 +59,14 @@ def _get_entry_point(origin: JobPythonOrigin):
         "run_id": "pipeline_run_id",
     }
 )
-
-
-
-
-
-
-
-
-            ("set_exit_code_on_failure", Optional[bool]),
-        ],
-    )
-):
+@record_custom
+class ExecuteRunArgs(IHaveNew):
+    # Deprecated, only needed for back-compat since it can be pulled from the PipelineRun
+    job_origin: JobPythonOrigin
+    run_id: str
+    instance_ref: Optional[InstanceRef]
+    set_exit_code_on_failure: Optional[bool]
+
     def __new__(
         cls,
         job_origin: JobPythonOrigin,
@@ -116,19 +76,11 @@ class ExecuteRunArgs(
     ):
         return super().__new__(
             cls,
-            job_origin=
-
-
-
-
-            run_id=check.str_param(run_id, "run_id"),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-            set_exit_code_on_failure=(
-                True
-                if check.opt_bool_param(set_exit_code_on_failure, "set_exit_code_on_failure")
-                is True
-                else None
-            ),  # for back-compat
+            job_origin=job_origin,
+            run_id=run_id,
+            instance_ref=instance_ref,
+            # for back-compat: only True or None allowed
+            set_exit_code_on_failure=True if set_exit_code_on_failure is True else None,
         )

     def get_command_args(self) -> Sequence[str]:
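Where back-compat normalization is still needed, the diff keeps a custom __new__ by pairing @record_custom with the IHaveNew base, as in ExecuteRunArgs above: the field declarations move to annotations, while __new__ only normalizes values (for example coercing set_exit_code_on_failure to True or None) instead of running check.* validators. A rough sketch of that shape, assuming the same keyword-based super().__new__ call used in the diff; the class name and fields are illustrative:

from typing import Optional

from dagster_shared.record import IHaveNew, record_custom


@record_custom
class ExampleRunArgs(IHaveNew):
    run_id: str
    set_exit_code_on_failure: Optional[bool]

    def __new__(cls, run_id: str, set_exit_code_on_failure: Optional[bool] = None):
        # normalize before storing, mirroring the back-compat rule in the diff:
        # anything other than an explicit True is persisted as None
        return super().__new__(
            cls,
            run_id=run_id,
            set_exit_code_on_failure=True if set_exit_code_on_failure is True else None,
        )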
@@ -146,18 +98,14 @@ class ExecuteRunArgs(
         "run_id": "pipeline_run_id",
     }
 )
-
-
-
-
-
-
-
-
-            ("set_exit_code_on_failure", Optional[bool]),
-        ],
-    )
-):
+@record_custom
+class ResumeRunArgs(IHaveNew):
+    # Deprecated, only needed for back-compat since it can be pulled from the DagsterRun
+    job_origin: JobPythonOrigin
+    run_id: str
+    instance_ref: Optional[InstanceRef]
+    set_exit_code_on_failure: Optional[bool]
+
     def __new__(
         cls,
         job_origin: JobPythonOrigin,
@@ -167,19 +115,11 @@ class ResumeRunArgs(
     ):
         return super().__new__(
             cls,
-            job_origin=
-
-
-
-
-            run_id=check.str_param(run_id, "run_id"),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-            set_exit_code_on_failure=(
-                True
-                if check.opt_bool_param(set_exit_code_on_failure, "set_exit_code_on_failure")
-                is True
-                else None
-            ),  # for back-compat
+            job_origin=job_origin,
+            run_id=run_id,
+            instance_ref=instance_ref,
+            # for back-compat: only True or None allowed
+            set_exit_code_on_failure=True if set_exit_code_on_failure is True else None,
         )

     def get_command_args(self) -> Sequence[str]:
@@ -198,32 +138,11 @@ class ResumeRunArgs(
         "run_id": "pipeline_run_id",
     },
 )
-
-
-
-
-
-            ("run_id", str),
-            ("instance_ref", Optional[InstanceRef]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        job_origin: RemoteJobOrigin,
-        run_id: str,
-        instance_ref: Optional[InstanceRef],
-    ):
-        return super().__new__(
-            cls,
-            job_origin=check.inst_param(
-                job_origin,
-                "job_origin",
-                RemoteJobOrigin,
-            ),
-            run_id=check.str_param(run_id, "run_id"),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-        )
+@record
+class ExecuteExternalJobArgs:
+    job_origin: RemoteJobOrigin
+    run_id: str
+    instance_ref: Optional[InstanceRef]


 @whitelist_for_serdes(
@@ -232,22 +151,18 @@ class ExecuteExternalJobArgs(
         "run_id": "pipeline_run_id",
     }
 )
-
-
-
-
-
-
-
-
-
-
-
-
-            ("print_serialized_events", bool),
-        ],
-    )
-):
+@record_custom
+class ExecuteStepArgs(IHaveNew):
+    # Deprecated, only needed for back-compat since it can be pulled from the DagsterRun
+    job_origin: JobPythonOrigin
+    run_id: str
+    step_keys_to_execute: Optional[Sequence[str]]
+    instance_ref: Optional[InstanceRef]
+    retry_mode: Optional[RetryMode]
+    known_state: Optional[KnownExecutionState]
+    should_verify_step: Optional[bool]
+    print_serialized_events: bool
+
     def __new__(
         cls,
         job_origin: JobPythonOrigin,
@@ -261,19 +176,15 @@ class ExecuteStepArgs(
     ):
         return super().__new__(
             cls,
-            job_origin=
-            run_id=
-            step_keys_to_execute=
-
-
-
-
-
-
-                should_verify_step, "should_verify_step", False
-            ),
-            print_serialized_events=check.opt_bool_param(
-                print_serialized_events, "print_serialized_events", False
+            job_origin=job_origin,
+            run_id=run_id,
+            step_keys_to_execute=step_keys_to_execute,
+            instance_ref=instance_ref,
+            retry_mode=retry_mode,
+            known_state=known_state,
+            should_verify_step=should_verify_step if should_verify_step is not None else False,
+            print_serialized_events=(
+                print_serialized_events if print_serialized_events is not None else False
             ),
         )

@@ -306,33 +217,24 @@ class ExecuteStepArgs(


 @whitelist_for_serdes
-
-
-
-
-        return super().__new__(
-            cls,
-            repository_name=check.str_param(repository_name, "repository_name"),
-            attribute=check.str_param(attribute, "attribute"),
-        )
+@record
+class LoadableRepositorySymbol:
+    repository_name: str
+    attribute: str


 @whitelist_for_serdes
-
-
-
-
-
-
-
-
-
-
-
-            ("defs_state_info", Optional[DefsStateInfo]),
-        ],
-    )
-):
+@record_custom
+class ListRepositoriesResponse(IHaveNew):
+    repository_symbols: Sequence[LoadableRepositorySymbol]
+    executable_path: Optional[str]
+    repository_code_pointer_dict: Mapping[str, CodePointer]
+    entry_point: Optional[Sequence[str]]
+    container_image: Optional[str]
+    container_context: Optional[Mapping[str, Any]]
+    dagster_library_versions: Optional[Mapping[str, str]]
+    defs_state_info: Optional[DefsStateInfo]
+
     def __new__(
         cls,
         repository_symbols: Sequence[LoadableRepositorySymbol],
@@ -340,52 +242,33 @@ class ListRepositoriesResponse(
         repository_code_pointer_dict: Optional[Mapping[str, CodePointer]] = None,
         entry_point: Optional[Sequence[str]] = None,
         container_image: Optional[str] = None,
-        container_context: Optional[Mapping] = None,
+        container_context: Optional[Mapping[str, Any]] = None,
         dagster_library_versions: Optional[Mapping[str, str]] = None,
         defs_state_info: Optional[DefsStateInfo] = None,
     ):
         return super().__new__(
             cls,
-            repository_symbols=
-
-
-
-
-
-
-
-
-
-            entry_point=(
-                check.sequence_param(entry_point, "entry_point", of_type=str)
-                if entry_point is not None
-                else None
-            ),
-            container_image=check.opt_str_param(container_image, "container_image"),
-            container_context=(
-                check.dict_param(container_context, "container_context")
-                if container_context is not None
-                else None
-            ),
-            dagster_library_versions=check.opt_nullable_mapping_param(
-                dagster_library_versions, "dagster_library_versions"
-            ),
-            defs_state_info=check.opt_inst_param(defs_state_info, "defs_state_info", DefsStateInfo),
+            repository_symbols=repository_symbols,
+            executable_path=executable_path,
+            repository_code_pointer_dict=(
+                repository_code_pointer_dict if repository_code_pointer_dict is not None else {}
+            ),
+            entry_point=entry_point,
+            container_image=container_image,
+            container_context=container_context,
+            dagster_library_versions=dagster_library_versions,
+            defs_state_info=defs_state_info,
         )


 @whitelist_for_serdes
-
-
-
-
-
-
-
-            ("attribute", Optional[str]),
-        ],
-    )
-):
+@record_custom
+class ListRepositoriesInput(IHaveNew):
+    module_name: Optional[str]
+    python_file: Optional[str]
+    working_directory: Optional[str]
+    attribute: Optional[str]
+
     def __new__(
         cls,
         module_name: Optional[str],
@@ -397,47 +280,22 @@ class ListRepositoriesInput(
         check.invariant(module_name or python_file, "Must set at least one")
         return super().__new__(
             cls,
-            module_name=
-            python_file=
-            working_directory=
-            attribute=
+            module_name=module_name,
+            python_file=python_file,
+            working_directory=working_directory,
+            attribute=attribute,
         )


 @whitelist_for_serdes
-
-
-
-
-
-
-
-
-            ("job_name", Optional[str]),
-            ("instance_ref", Optional[InstanceRef]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        repository_origin: RemoteRepositoryOrigin,
-        partition_set_name: str,
-        partition_name: str,
-        job_name: Optional[str] = None,
-        instance_ref: Optional[InstanceRef] = None,
-    ):
-        return super().__new__(
-            cls,
-            repository_origin=check.inst_param(
-                repository_origin,
-                "repository_origin",
-                RemoteRepositoryOrigin,
-            ),
-            partition_set_name=check.str_param(partition_set_name, "partition_set_name"),
-            job_name=check.opt_str_param(job_name, "job_name"),
-            partition_name=check.str_param(partition_name, "partition_name"),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-        )
+@record
+class PartitionArgs:
+    repository_origin: RemoteRepositoryOrigin
+    # This is here for backcompat. it's expected to always be f"{job_name}_partition_set".
+    partition_set_name: str
+    partition_name: str
+    job_name: Optional[str] = None
+    instance_ref: Optional[InstanceRef] = None

     def get_job_name(self) -> str:
         if self.job_name:
@@ -447,35 +305,16 @@ class PartitionArgs(


 @whitelist_for_serdes
-
-
-
-
-
-
-
-
-
-
-            # own is enough to specify which PartitionsDefinition to use.
-            ("job_name", Optional[str]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        repository_origin: RemoteRepositoryOrigin,
-        partition_set_name: str,
-        job_name: Optional[str] = None,
-    ):
-        return super().__new__(
-            cls,
-            repository_origin=check.inst_param(
-                repository_origin, "repository_origin", RemoteRepositoryOrigin
-            ),
-            job_name=check.opt_str_param(job_name, "job_name"),
-            partition_set_name=check.str_param(partition_set_name, "partition_set_name"),
-        )
+@record
+class PartitionNamesArgs:
+    repository_origin: RemoteRepositoryOrigin
+    # This is here for backcompat. it's expected to always be f"{job_name}_partition_set".
+    partition_set_name: str
+    # This is introduced in the same release that we're making it possible for an asset job
+    # to target assets with different PartitionsDefinitions. Prior user code versions can
+    # (and do) safely ignore this parameter, because, in those versions, the job name on its
+    # own is enough to specify which PartitionsDefinition to use.
+    job_name: Optional[str] = None

     def get_job_name(self) -> str:
         if self.job_name:
@@ -485,33 +324,12 @@ class PartitionNamesArgs(


 @whitelist_for_serdes
-
-
-
-
-
-
-            ("partition_names", Sequence[str]),
-            ("instance_ref", Optional[InstanceRef]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        repository_origin: RemoteRepositoryOrigin,
-        partition_set_name: str,
-        partition_names: Sequence[str],
-        instance_ref: Optional[InstanceRef] = None,
-    ):
-        return super().__new__(
-            cls,
-            repository_origin=check.inst_param(
-                repository_origin, "repository_origin", RemoteRepositoryOrigin
-            ),
-            partition_set_name=check.str_param(partition_set_name, "partition_set_name"),
-            partition_names=check.sequence_param(partition_names, "partition_names", of_type=str),
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-        )
+@record
+class PartitionSetExecutionParamArgs:
+    repository_origin: RemoteRepositoryOrigin
+    partition_set_name: str
+    partition_names: Sequence[str]
+    instance_ref: Optional[InstanceRef] = None


 @whitelist_for_serdes(
@@ -523,18 +341,14 @@ class PartitionSetExecutionParamArgs(
     # asset_selection previously was erroneously represented as a sequence
     field_serializers={"asset_selection": SetToSequenceFieldSerializer},
 )
-
-
-
-
-
-
-
-
-            ("include_parent_snapshot", bool),
-        ],
-    )
-):
+@record_custom
+class JobSubsetSnapshotArgs(IHaveNew):
+    job_origin: RemoteJobOrigin
+    op_selection: Optional[Sequence[str]]
+    asset_selection: Optional[AbstractSet[AssetKey]]
+    asset_check_selection: Optional[AbstractSet[AssetCheckKey]]
+    include_parent_snapshot: bool
+
     def __new__(
         cls,
         job_origin: RemoteJobOrigin,
@@ -545,14 +359,10 @@ class JobSubsetSnapshotArgs(
     ):
         return super().__new__(
             cls,
-            job_origin=
-            op_selection=
-
-
-            asset_selection=check.opt_nullable_set_param(asset_selection, "asset_selection"),
-            asset_check_selection=check.opt_nullable_set_param(
-                asset_check_selection, "asset_check_selection"
-            ),
+            job_origin=job_origin,
+            op_selection=op_selection,
+            asset_selection=asset_selection,
+            asset_check_selection=asset_check_selection,
             include_parent_snapshot=(
                 include_parent_snapshot if include_parent_snapshot is not None else True
             ),
@@ -561,37 +371,23 @@ class JobSubsetSnapshotArgs(

 # Different storage field name for backcompat
 @whitelist_for_serdes(storage_field_names={"code_location_origin": "repository_location_origin"})
-
-
-
-
-    )
-):
-    def __new__(cls, code_location_origin: CodeLocationOrigin, notebook_path: str):
-        return super().__new__(
-            cls,
-            code_location_origin=check.inst_param(
-                code_location_origin, "code_location_origin", CodeLocationOrigin
-            ),
-            notebook_path=check.str_param(notebook_path, "notebook_path"),
-        )
+@record
+class NotebookPathArgs:
+    code_location_origin: CodeLocationOrigin
+    notebook_path: str


 @whitelist_for_serdes
-
-
-
-
-
-
-
-
-
-
-            ("timeout", Optional[int]),
-        ],
-    )
-):
+@record_custom
+class ExternalScheduleExecutionArgs(IHaveNew):
+    repository_origin: RemoteRepositoryOrigin
+    instance_ref: Optional[InstanceRef]
+    schedule_name: str
+    scheduled_execution_timestamp: Optional[float]
+    scheduled_execution_timezone: Optional[str]
+    log_key: Sequence[str]
+    timeout: Optional[int]
+
     def __new__(
         cls,
         repository_origin: RemoteRepositoryOrigin,
@@ -604,42 +400,31 @@ class ExternalScheduleExecutionArgs(
     ):
         return super().__new__(
             cls,
-            repository_origin=
-
-
-
-
-
-
-            ),
-            scheduled_execution_timezone=check.opt_str_param(
-                scheduled_execution_timezone,
-                "scheduled_execution_timezone",
-            ),
-            log_key=check.opt_list_param(log_key, "log_key", of_type=str),
-            timeout=check.opt_int_param(timeout, "timeout"),
+            repository_origin=repository_origin,
+            instance_ref=instance_ref,
+            schedule_name=schedule_name,
+            scheduled_execution_timestamp=scheduled_execution_timestamp,
+            scheduled_execution_timezone=scheduled_execution_timezone,
+            log_key=log_key if log_key is not None else [],
+            timeout=timeout,
         )


 @whitelist_for_serdes
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            ("last_completion_time", Optional[float]),
-        ],
-    )
-):
+@record_custom
+class SensorExecutionArgs(IHaveNew):
+    repository_origin: RemoteRepositoryOrigin
+    instance_ref: Optional[InstanceRef]
+    sensor_name: str
+    last_tick_completion_time: Optional[float]
+    last_run_key: Optional[str]
+    cursor: Optional[str]
+    log_key: Sequence[str]
+    timeout: Optional[int]
+    last_sensor_start_time: Optional[float]
+    # deprecated
+    last_completion_time: Optional[float]
+
     def __new__(
         cls,
         repository_origin: RemoteRepositoryOrigin,
@@ -663,192 +448,83 @@ class SensorExecutionArgs(
         )
         return super().__new__(
             cls,
-            repository_origin=
-
-
-            instance_ref=check.opt_inst_param(instance_ref, "instance_ref", InstanceRef),
-            sensor_name=check.str_param(sensor_name, "sensor_name"),
+            repository_origin=repository_origin,
+            instance_ref=instance_ref,
+            sensor_name=sensor_name,
             last_tick_completion_time=normalized_last_tick_completion_time,
-            last_run_key=
-            cursor=
-            log_key=
+            last_run_key=last_run_key,
+            cursor=cursor,
+            log_key=log_key if log_key is not None else [],
             timeout=timeout,
-            last_sensor_start_time=
-                last_sensor_start_time, "last_sensor_start_time"
-            ),
+            last_sensor_start_time=last_sensor_start_time,
             last_completion_time=normalized_last_tick_completion_time,
         )

     def with_default_timeout(self, timeout: int) -> "SensorExecutionArgs":
         """If the timeout is not explicitly set, provides a default timeout which is used for the sensor execution."""
         if self.timeout is None:
-            return self
+            return copy(self, timeout=timeout)
         return self


 @whitelist_for_serdes
-
-
-
-
-
-            ("instance_ref", InstanceRef),
-            ("name", str),
-        ],
-    )
-):
-    def __new__(
-        cls, repository_origin: RemoteRepositoryOrigin, instance_ref: InstanceRef, name: str
-    ):
-        return super().__new__(
-            cls,
-            repository_origin=check.inst_param(
-                repository_origin, "repository_origin", RemoteRepositoryOrigin
-            ),
-            instance_ref=check.inst_param(instance_ref, "instance_ref", InstanceRef),
-            name=check.str_param(name, "name"),
-        )
+@record
+class ExternalJobArgs:
+    repository_origin: RemoteRepositoryOrigin
+    instance_ref: InstanceRef
+    name: str


 @whitelist_for_serdes
-
-
-
-
-    )
-):
-    def __new__(cls, success: bool, serializable_error_info: Optional[SerializableErrorInfo]):
-        return super().__new__(
-            cls,
-            success=check.bool_param(success, "success"),
-            serializable_error_info=check.opt_inst_param(
-                serializable_error_info, "serializable_error_info", SerializableErrorInfo
-            ),
-        )
+@record
+class ShutdownServerResult:
+    success: bool
+    serializable_error_info: Optional[SerializableErrorInfo]


 @whitelist_for_serdes
-
-
-
-            cls,
-            run_id=check.str_param(run_id, "run_id"),
-        )
+@record
+class CancelExecutionRequest:
+    run_id: str


 @whitelist_for_serdes
-
-
-
-
-
-            ("message", Optional[str]),
-            ("serializable_error_info", Optional[SerializableErrorInfo]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        success: bool,
-        message: Optional[str],
-        serializable_error_info: Optional[SerializableErrorInfo],
-    ):
-        return super().__new__(
-            cls,
-            success=check.bool_param(success, "success"),
-            message=check.opt_str_param(message, "message"),
-            serializable_error_info=check.opt_inst_param(
-                serializable_error_info, "serializable_error_info", SerializableErrorInfo
-            ),
-        )
+@record
+class CancelExecutionResult:
+    success: bool
+    message: Optional[str]
+    serializable_error_info: Optional[SerializableErrorInfo]


 @whitelist_for_serdes
-
-
-
-            cls,
-            run_id=check.str_param(run_id, "run_id"),
-        )
+@record
+class CanCancelExecutionRequest:
+    run_id: str


 @whitelist_for_serdes
-
-
-
-            cls,
-            can_cancel=check.bool_param(can_cancel, "can_cancel"),
-        )
+@record
+class CanCancelExecutionResult:
+    can_cancel: bool


 @whitelist_for_serdes
-
-
-
-
-
-            ("message", Optional[str]),
-            ("serializable_error_info", Optional[SerializableErrorInfo]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        success: bool,
-        message: Optional[str],
-        serializable_error_info: Optional[SerializableErrorInfo],
-    ):
-        return super().__new__(
-            cls,
-            success=check.bool_param(success, "success"),
-            message=check.opt_str_param(message, "message"),
-            serializable_error_info=check.opt_inst_param(
-                serializable_error_info, "serializable_error_info", SerializableErrorInfo
-            ),
-        )
+@record
+class StartRunResult:
+    success: bool
+    message: Optional[str]
+    serializable_error_info: Optional[SerializableErrorInfo]


 @whitelist_for_serdes
-
-
-
-
-            ("current_image", Optional[str]),
-            ("serializable_error_info", Optional[SerializableErrorInfo]),
-        ],
-    )
-):
-    def __new__(
-        cls, current_image: Optional[str], serializable_error_info: Optional[SerializableErrorInfo]
-    ):
-        return super().__new__(
-            cls,
-            current_image=check.opt_str_param(current_image, "current_image"),
-            serializable_error_info=check.opt_inst_param(
-                serializable_error_info, "serializable_error_info", SerializableErrorInfo
-            ),
-        )
+@record
+class GetCurrentImageResult:
+    current_image: Optional[str]
+    serializable_error_info: Optional[SerializableErrorInfo]


 @whitelist_for_serdes
-
-
-
-
-            ("current_runs", Sequence[str]),
-            ("serializable_error_info", Optional[SerializableErrorInfo]),
-        ],
-    )
-):
-    def __new__(
-        cls,
-        current_runs: Sequence[str],
-        serializable_error_info: Optional[SerializableErrorInfo],
-    ):
-        return super().__new__(
-            cls,
-            current_runs=check.list_param(current_runs, "current_runs", of_type=str),
-            serializable_error_info=check.opt_inst_param(
-                serializable_error_info, "serializable_error_info", SerializableErrorInfo
-            ),
-        )
+@record
+class GetCurrentRunsResult:
+    current_runs: Sequence[str]
+    serializable_error_info: Optional[SerializableErrorInfo]