dagster 1.12.2__py3-none-any.whl → 1.12.4__py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (52)
  1. dagster/_config/pythonic_config/conversion_utils.py +2 -1
  2. dagster/_config/pythonic_config/resource.py +4 -2
  3. dagster/_config/source.py +17 -6
  4. dagster/_core/definitions/asset_daemon_cursor.py +4 -3
  5. dagster/_core/definitions/asset_sensor_definition.py +61 -1
  6. dagster/_core/definitions/assets/graph/remote_asset_graph.py +14 -11
  7. dagster/_core/definitions/automation_condition_sensor_definition.py +59 -2
  8. dagster/_core/definitions/declarative_automation/automation_condition.py +40 -6
  9. dagster/_core/definitions/declarative_automation/automation_context.py +8 -2
  10. dagster/_core/definitions/declarative_automation/legacy/legacy_context.py +10 -4
  11. dagster/_core/definitions/declarative_automation/legacy/rule_condition.py +8 -2
  12. dagster/_core/definitions/declarative_automation/operators/check_operators.py +18 -4
  13. dagster/_core/definitions/declarative_automation/operators/dep_operators.py +18 -4
  14. dagster/_core/definitions/declarative_automation/operators/newly_true_operator.py +27 -1
  15. dagster/_core/definitions/declarative_automation/operators/since_operator.py +27 -1
  16. dagster/_core/definitions/metadata/metadata_value.py +4 -3
  17. dagster/_core/definitions/multi_asset_sensor_definition.py +64 -2
  18. dagster/_core/definitions/op_definition.py +10 -2
  19. dagster/_core/definitions/reconstruct.py +0 -6
  20. dagster/_core/definitions/run_status_sensor_definition.py +79 -1
  21. dagster/_core/definitions/selector.py +4 -0
  22. dagster/_core/definitions/sensor_definition.py +32 -21
  23. dagster/_core/execution/backfill.py +29 -4
  24. dagster/_core/execution/context/output.py +26 -26
  25. dagster/_core/execution/plan/objects.py +3 -1
  26. dagster/_core/remote_representation/code_location.py +11 -13
  27. dagster/_core/remote_representation/handle.py +4 -2
  28. dagster/_core/workspace/context.py +9 -3
  29. dagster/_core/workspace/workspace.py +6 -0
  30. dagster/_daemon/asset_daemon.py +56 -11
  31. dagster/_daemon/sensor.py +11 -3
  32. dagster/_utils/error.py +1 -1
  33. dagster/components/component/component.py +31 -7
  34. dagster/components/core/component_tree.py +41 -28
  35. dagster/components/core/context.py +50 -15
  36. dagster/components/lib/definitions_component/__init__.py +2 -0
  37. dagster/components/lib/executable_component/function_component.py +26 -23
  38. dagster/components/lib/executable_component/python_script_component.py +2 -0
  39. dagster/components/lib/executable_component/uv_run_component.py +2 -0
  40. dagster/components/lib/sql_component/sql_component.py +1 -0
  41. dagster/components/list/list.py +1 -1
  42. dagster/components/resolved/context.py +15 -36
  43. dagster/components/resolved/scopes.py +161 -0
  44. dagster/components/testing/__init__.py +1 -0
  45. dagster/components/testing/utils.py +19 -18
  46. dagster/version.py +1 -1
  47. {dagster-1.12.2.dist-info → dagster-1.12.4.dist-info}/METADATA +3 -3
  48. {dagster-1.12.2.dist-info → dagster-1.12.4.dist-info}/RECORD +52 -51
  49. {dagster-1.12.2.dist-info → dagster-1.12.4.dist-info}/WHEEL +0 -0
  50. {dagster-1.12.2.dist-info → dagster-1.12.4.dist-info}/entry_points.txt +0 -0
  51. {dagster-1.12.2.dist-info → dagster-1.12.4.dist-info}/licenses/LICENSE +0 -0
  52. {dagster-1.12.2.dist-info → dagster-1.12.4.dist-info}/top_level.txt +0 -0
dagster/_core/definitions/declarative_automation/operators/since_operator.py

@@ -8,7 +8,7 @@ from typing_extensions import Self
 
 import dagster._check as check
 from dagster._annotations import public
-from dagster._core.definitions.asset_key import T_EntityKey
+from dagster._core.definitions.asset_key import EntityKey, T_EntityKey
 from dagster._core.definitions.declarative_automation.automation_condition import (
     AutomationCondition,
     AutomationResult,
@@ -93,6 +93,32 @@ class SinceCondition(BuiltinAutomationCondition[T_EntityKey]):
     def children(self) -> Sequence[AutomationCondition[T_EntityKey]]:
         return [self.trigger_condition, self.reset_condition]
 
+    def get_node_unique_id(
+        self,
+        *,
+        parent_unique_id: Optional[str],
+        index: Optional[int],
+        target_key: Optional[EntityKey],
+    ) -> str:
+        # since conditions should have stable cursoring logic regardless of where they
+        # exist in the broader condition tree, as they're always evaluated over the entire
+        # subset
+        return self._get_stable_unique_id(target_key)
+
+    def get_backcompat_node_unique_ids(
+        self,
+        *,
+        parent_unique_id: Optional[str] = None,
+        index: Optional[int] = None,
+        target_key: Optional[EntityKey] = None,
+    ) -> Sequence[str]:
+        return [
+            # get the standard globally-aware unique id for backcompat purposes
+            super().get_node_unique_id(
+                parent_unique_id=parent_unique_id, index=index, target_key=target_key
+            )
+        ]
+
     async def evaluate(  # pyright: ignore[reportIncompatibleMethodOverride]
         self, context: AutomationContext[T_EntityKey]
     ) -> AutomationResult[T_EntityKey]:
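
For orientation, SinceCondition is the node produced when composing conditions with .since(...); with the change above, its cursor key no longer depends on its position in the condition tree. A minimal sketch of such a composition, using Dagster's public AutomationCondition API (asset name is a placeholder):

    import dagster as dg

    # .since(...) constructs a SinceCondition under the hood; per this release,
    # its evaluation cursor stays stable even if the surrounding tree changes.
    condition = dg.AutomationCondition.missing().since(
        dg.AutomationCondition.cron_tick_passed("0 * * * *")
    )

    @dg.asset(automation_condition=condition)
    def my_asset() -> None: ...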
dagster/_core/definitions/metadata/metadata_value.py

@@ -1,10 +1,10 @@
+import json
 from abc import ABC, abstractmethod
 from collections.abc import Callable, Mapping, Sequence
 from datetime import datetime
 from os import PathLike
 from typing import Any, Generic, Optional, Union
 
-import dagster_shared.seven as seven
 from dagster_shared.record import IHaveNew, LegacyNamedTupleMixin, record, record_custom
 from dagster_shared.serdes.serdes import (
     FieldSerializer,
@@ -668,8 +668,9 @@ class JsonMetadataValue(
 
     def __new__(cls, data: Optional[Union[Sequence[Any], Mapping[str, Any]]]):
         try:
-            # check that the value is JSON serializable
-            seven.dumps(data)
+            # check that the value is JSON serializable (and do any transformation
+            # that json.dumps would do under the hood, like enums to string values)
+            data = json.loads(json.dumps(data))
         except TypeError:
             raise DagsterInvalidMetadata("Value is not JSON serializable.")
         return super().__new__(cls, data=data)
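
The round trip through json.dumps and json.loads both validates the value and normalizes it the way JSON serialization would. A stdlib-only illustration of the normalization the new comment describes (names here are illustrative):

    import json
    from enum import Enum

    class Color(str, Enum):  # str-subclassing enums serialize as their values
        RED = "red"

    raw = {"color": Color.RED, "point": (1, 2)}
    normalized = json.loads(json.dumps(raw))
    assert normalized == {"color": "red", "point": [1, 2]}  # enum -> str, tuple -> list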
dagster/_core/definitions/multi_asset_sensor_definition.py

@@ -22,6 +22,7 @@ from dagster._core.definitions.sensor_definition import (
     SensorType,
     get_context_param_name,
     get_sensor_context_from_args_or_kwargs,
+    resolve_jobs_from_targets_for_with_attributes,
     validate_and_get_resource_dict,
 )
 from dagster._core.definitions.target import ExecutableDefinition
@@ -33,7 +34,7 @@ from dagster._core.errors import (
 )
 from dagster._core.instance import DagsterInstance
 from dagster._core.instance.ref import InstanceRef
-from dagster._utils import normalize_to_repository
+from dagster._utils import IHasInternalInit, normalize_to_repository
 from dagster._utils.warnings import deprecation_warning, normalize_renamed_param
 
 if TYPE_CHECKING:
@@ -1103,7 +1104,7 @@ MultiAssetMaterializationFunction = Callable[
     "multi_asset_sensors may be used."
 )
 @public
-class MultiAssetSensorDefinition(SensorDefinition):
+class MultiAssetSensorDefinition(SensorDefinition, IHasInternalInit):
     """Define an asset sensor that initiates a set of runs based on the materialization of a list of
     assets.
 
@@ -1245,6 +1246,9 @@ class MultiAssetSensorDefinition(SensorDefinition):
             return _fn
 
         self._raw_asset_materialization_fn = asset_materialization_fn
+        self._monitored_assets = monitored_assets
+        self._job_name = job_name
+        self._raw_required_resource_keys = combined_required_resource_keys
 
         super().__init__(
            name=check_valid_name(name),
@@ -1288,3 +1292,61 @@ class MultiAssetSensorDefinition(SensorDefinition):
     @property
     def sensor_type(self) -> SensorType:
         return SensorType.MULTI_ASSET
+
+    @staticmethod
+    def dagster_internal_init(  # type: ignore
+        *,
+        name: str,
+        monitored_assets: Union[Sequence[AssetKey], AssetSelection],
+        job_name: Optional[str],
+        asset_materialization_fn: MultiAssetMaterializationFunction,
+        minimum_interval_seconds: Optional[int],
+        description: Optional[str],
+        job: Optional[ExecutableDefinition],
+        jobs: Optional[Sequence[ExecutableDefinition]],
+        default_status: DefaultSensorStatus,
+        request_assets: Optional[AssetSelection],
+        required_resource_keys: Optional[set[str]],
+        tags: Optional[Mapping[str, str]],
+        metadata: Optional[RawMetadataMapping],
+    ) -> "MultiAssetSensorDefinition":
+        return MultiAssetSensorDefinition(
+            name=name,
+            monitored_assets=monitored_assets,
+            job_name=job_name,
+            asset_materialization_fn=asset_materialization_fn,
+            minimum_interval_seconds=minimum_interval_seconds,
+            description=description,
+            job=job,
+            jobs=jobs,
+            default_status=default_status,
+            request_assets=request_assets,
+            required_resource_keys=required_resource_keys,
+            tags=tags,
+            metadata=metadata,
+        )
+
+    def with_attributes(
+        self,
+        *,
+        jobs: Optional[Sequence[ExecutableDefinition]] = None,
+        metadata: Optional[RawMetadataMapping] = None,
+    ) -> "MultiAssetSensorDefinition":
+        """Returns a copy of this sensor with the attributes replaced."""
+        job_name, new_job, new_jobs = resolve_jobs_from_targets_for_with_attributes(self, jobs)
+
+        return MultiAssetSensorDefinition.dagster_internal_init(
+            name=self.name,
+            monitored_assets=self._monitored_assets,
+            job_name=job_name,
+            asset_materialization_fn=self._raw_asset_materialization_fn,
+            minimum_interval_seconds=self.minimum_interval_seconds,
+            description=self.description,
+            job=new_job,
+            jobs=new_jobs,
+            default_status=self.default_status,
+            request_assets=self.asset_selection,
+            required_resource_keys=self._raw_required_resource_keys,
+            tags=self._tags,
+            metadata=metadata if metadata is not None else self._metadata,
+        )
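
With dagster_internal_init and with_attributes in place, a multi-asset sensor can now be copied with its jobs or metadata swapped out. A hedged usage sketch, assuming the 1.12.4 with_attributes signature shown above (asset and job names are placeholders):

    import dagster as dg

    @dg.op
    def noop(): ...

    @dg.job
    def job_a(): noop()

    @dg.job
    def job_b(): noop()

    @dg.multi_asset_sensor(monitored_assets=[dg.AssetKey("upstream")], job=job_a)
    def my_sensor(context: dg.MultiAssetSensorEvaluationContext):
        return dg.SkipReason("example only")

    # Returns a retargeted copy; the original definition is unchanged.
    retargeted = my_sensor.with_attributes(jobs=[job_b], metadata={"team": "data-eng"})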
dagster/_core/definitions/op_definition.py

@@ -1,4 +1,5 @@
 import inspect
+import re
 from collections.abc import Iterator, Mapping, Sequence, Set
 from typing import (  # noqa: UP035
     TYPE_CHECKING,
@@ -36,7 +37,7 @@ from dagster._core.definitions.resource_requirement import (
     ResourceRequirement,
 )
 from dagster._core.definitions.result import MaterializeResult, ObserveResult
-from dagster._core.definitions.utils import DEFAULT_IO_MANAGER_KEY, check_valid_chars
+from dagster._core.definitions.utils import DEFAULT_IO_MANAGER_KEY
 from dagster._core.errors import (
     DagsterInvalidDefinitionError,
     DagsterInvalidInvocationError,
@@ -611,12 +612,19 @@ def _is_result_object_type(ttype):
     return ttype in (MaterializeResult, ObserveResult, AssetCheckResult)
 
 
+VALID_POOL_NAME_REGEX_STR = r"^[A-Za-z0-9_\/]+$"  # standard name regex with slashes
+VALID_POOL_NAME_REGEX = re.compile(VALID_POOL_NAME_REGEX_STR)
+
+
 def _validate_pool(pool, tags):
     check.opt_str_param(pool, "pool")
     if not pool:
         return None
 
-    check_valid_chars(pool)
+    if not VALID_POOL_NAME_REGEX.match(pool):
+        raise DagsterInvalidDefinitionError(
+            f'Pool "{pool}" is not a valid pool name. It must match the regex {VALID_POOL_NAME_REGEX_STR}.'
+        )
 
     tags = check.opt_mapping_param(tags, "tags")
     tag_concurrency_key = tags.get(GLOBAL_CONCURRENCY_TAG)
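
The new check is a plain regex over the pool name. For illustration, names accepted and rejected under it (stdlib-only):

    import re

    VALID_POOL_NAME_REGEX = re.compile(r"^[A-Za-z0-9_\/]+$")

    assert VALID_POOL_NAME_REGEX.match("database/writes")  # slashes are allowed
    assert VALID_POOL_NAME_REGEX.match("gpu_pool_1")
    assert not VALID_POOL_NAME_REGEX.match("gpu pool")     # spaces are not
    assert not VALID_POOL_NAME_REGEX.match("pool-a")       # neither are hyphens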
dagster/_core/definitions/reconstruct.py

@@ -154,9 +154,6 @@ class ReconstructableRepository(
             container_context=self.container_context,
         )
 
-    def get_python_origin_id(self) -> str:
-        return self.get_python_origin().get_id()
-
     # Allow this to be hashed for use in `lru_cache`. This is needed because:
     # - `ReconstructableJob` uses `lru_cache`
     # - `ReconstructableJob` has a `ReconstructableRepository` attribute
@@ -313,9 +310,6 @@ class ReconstructableJob(  # pyright: ignore[reportIncompatibleVariableOverride]
     def get_python_origin(self) -> JobPythonOrigin:
         return JobPythonOrigin(self.job_name, self.repository.get_python_origin())
 
-    def get_python_origin_id(self) -> str:
-        return self.get_python_origin().get_id()
-
     def get_module(self) -> Optional[str]:
         """Return the module the job is found in, the origin is a module code pointer."""
         pointer = self.get_python_origin().get_repo_pointer()
dagster/_core/definitions/run_status_sensor_definition.py

@@ -30,6 +30,7 @@ from dagster._core.definitions.sensor_definition import (
     SkipReason,
     get_context_param_name,
     get_or_create_sensor_context,
+    resolve_jobs_from_targets_for_with_attributes,
     validate_and_get_resource_dict,
 )
 from dagster._core.definitions.target import ExecutableDefinition
@@ -46,6 +47,7 @@ from dagster._core.instance import DagsterInstance
 from dagster._core.storage.dagster_run import DagsterRun, DagsterRunStatus, RunsFilter
 from dagster._serdes import serialize_value, whitelist_for_serdes
 from dagster._time import datetime_from_timestamp, parse_time_string
+from dagster._utils import IHasInternalInit
 from dagster._utils.error import serializable_error_info_from_exc_info
 from dagster._utils.warnings import normalize_renamed_param
 
@@ -583,7 +585,7 @@ def run_failure_sensor(
 
 
 @public
-class RunStatusSensorDefinition(SensorDefinition):
+class RunStatusSensorDefinition(SensorDefinition, IHasInternalInit):
     """Define a sensor that reacts to a given status of job execution, where the decorated
     function will be evaluated when a run is at the given status.
 
@@ -685,6 +687,10 @@
         self._run_status_sensor_fn = check.callable_param(
             run_status_sensor_fn, "run_status_sensor_fn"
         )
+        self._run_status = run_status
+        self._monitored_jobs = monitored_jobs
+        self._monitor_all_code_locations = monitor_all_code_locations
+        self._raw_required_resource_keys = combined_required_resource_keys
         event_type = PIPELINE_RUN_STATUS_TO_EVENT_TYPE[run_status]
 
         # split monitored_jobs into external repos, external jobs, and jobs in the current repo
@@ -1024,6 +1030,78 @@
     def sensor_type(self) -> SensorType:
         return SensorType.RUN_STATUS
 
+    @staticmethod
+    def dagster_internal_init(  # type: ignore
+        *,
+        name: str,
+        run_status: DagsterRunStatus,
+        run_status_sensor_fn: RunStatusSensorEvaluationFunction,
+        minimum_interval_seconds: Optional[int],
+        description: Optional[str],
+        monitored_jobs: Optional[
+            Sequence[
+                Union[
+                    JobDefinition,
+                    GraphDefinition,
+                    UnresolvedAssetJobDefinition,
+                    "RepositorySelector",
+                    "JobSelector",
+                    "CodeLocationSelector",
+                ]
+            ]
+        ],
+        monitor_all_code_locations: Optional[bool],
+        default_status: DefaultSensorStatus,
+        request_job: Optional[ExecutableDefinition],
+        request_jobs: Optional[Sequence[ExecutableDefinition]],
+        tags: Optional[Mapping[str, str]],
+        metadata: Optional[RawMetadataMapping],
+        required_resource_keys: Optional[set[str]],
+    ) -> "RunStatusSensorDefinition":
+        return RunStatusSensorDefinition(
+            name=name,
+            run_status=run_status,
+            run_status_sensor_fn=run_status_sensor_fn,
+            minimum_interval_seconds=minimum_interval_seconds,
+            description=description,
+            monitored_jobs=monitored_jobs,
+            monitor_all_code_locations=monitor_all_code_locations,
+            default_status=default_status,
+            request_job=request_job,
+            request_jobs=request_jobs,
+            tags=tags,
+            metadata=metadata,
+            required_resource_keys=required_resource_keys,
+        )
+
+    def with_attributes(
+        self,
+        *,
+        jobs: Optional[Sequence[ExecutableDefinition]] = None,
+        metadata: Optional[RawMetadataMapping] = None,
+    ) -> "RunStatusSensorDefinition":
+        """Returns a copy of this sensor with the attributes replaced."""
+        job_name, new_job, new_jobs = resolve_jobs_from_targets_for_with_attributes(self, jobs)
+
+        # We need to store the run_status and monitored_jobs for reconstruction
+        # Extract monitored_jobs from the wrapped function's closure if possible
+        # For now, we'll need to access the stored attributes
+        return RunStatusSensorDefinition.dagster_internal_init(
+            name=self.name,
+            run_status=self._run_status,
+            run_status_sensor_fn=self._run_status_sensor_fn,
+            minimum_interval_seconds=self.minimum_interval_seconds,
+            description=self.description,
+            monitored_jobs=self._monitored_jobs,
+            monitor_all_code_locations=self._monitor_all_code_locations,
+            default_status=self.default_status,
+            request_job=new_job,
+            request_jobs=new_jobs,
+            tags=self._tags,
+            metadata=metadata if metadata is not None else self._metadata,
+            required_resource_keys=self._raw_required_resource_keys,
+        )
+
 
 @deprecated_param(
     param="job_selection",
dagster/_core/definitions/selector.py

@@ -315,6 +315,10 @@ class InstigatorSelector:
     def get_id(self) -> str:
         return create_snapshot_id(self)
 
+    @property
+    def instigator_name(self) -> str:
+        return self.name
+
 
 @record
 class GraphSelector:
dagster/_core/definitions/sensor_definition.py

@@ -554,6 +554,37 @@ def split_run_requests(
     return run_requests_for_backfill_daemon, run_requests_for_single_runs
 
 
+def resolve_jobs_from_targets_for_with_attributes(
+    sensor_def: "SensorDefinition", new_jobs: Optional[Sequence[ExecutableDefinition]]
+) -> tuple[Optional[str], Optional[ExecutableDefinition], Optional[Sequence[ExecutableDefinition]]]:
+    """Utility function to resolve job/jobs/job_name parameters for with_attributes method.
+
+    Returns a tuple of (job_name, job, jobs) to pass to dagster_internal_init.
+    """
+    if new_jobs is not None:
+        new_jobs_seq = new_jobs if len(new_jobs) > 1 else None
+        new_job = new_jobs[0] if len(new_jobs) == 1 else None
+        job_name = None
+    elif sensor_def.has_jobs:
+        new_job = sensor_def.job if len(sensor_def.jobs) == 1 else None
+        new_jobs_seq = sensor_def.jobs if len(sensor_def.jobs) > 1 else None
+        job_name = None
+    elif sensor_def._targets:  # noqa: SLF001
+        check.invariant(
+            len(sensor_def._targets) == 1 and not sensor_def._targets[0].has_job_def,  # noqa: SLF001
+            "Expected only one target by job name string.",
+        )
+        job_name = sensor_def._targets[0].job_name  # noqa: SLF001
+        new_job = None
+        new_jobs_seq = None
+    else:
+        job_name = None
+        new_job = None
+        new_jobs_seq = None
+
+    return job_name, new_job, new_jobs_seq
+
+
 @public
 @beta_param(param="owners")
 class SensorDefinition(IHasInternalInit):
@@ -597,27 +628,7 @@ class SensorDefinition(IHasInternalInit):
         metadata: Optional[RawMetadataMapping] = None,
     ) -> "SensorDefinition":
         """Returns a copy of this sensor with the attributes replaced."""
-        # unfortunate re-derivation of how inputs map to _targets
-        if jobs is not None:
-            new_jobs = jobs if len(jobs) > 1 else None
-            new_job = jobs[0] if len(jobs) == 1 else None
-            job_name = None
-        elif self.has_jobs:
-            new_job = self.job if len(self.jobs) == 1 else None
-            new_jobs = self.jobs if len(self.jobs) > 1 else None
-            job_name = None
-        elif self._targets:
-            check.invariant(
-                len(self._targets) == 1 and not self._targets[0].has_job_def,
-                "Expected only one target by job name string.",
-            )
-            job_name = self._targets[0].job_name
-            new_job = None
-            new_jobs = None
-        else:
-            job_name = None
-            new_job = None
-            new_jobs = None
+        job_name, new_job, new_jobs = resolve_jobs_from_targets_for_with_attributes(self, jobs)
 
         return SensorDefinition.dagster_internal_init(
             name=self.name,
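
The extracted helper centralizes the re-derivation of how job inputs map back to sensor targets, so the three sensor classes no longer duplicate it. A self-contained mimic of its three-way contract, with plain strings standing in for job definitions (illustrative only, not the Dagster API):

    def resolve(new_jobs, existing_jobs, target_job_name):
        # branch 1: explicit override; branch 2: existing job defs; branch 3: name-only target
        jobs = new_jobs if new_jobs is not None else existing_jobs
        if jobs:
            return (None, jobs[0] if len(jobs) == 1 else None, jobs if len(jobs) > 1 else None)
        if target_job_name:
            return (target_job_name, None, None)
        return (None, None, None)

    assert resolve(["a"], [], None) == (None, "a", None)                # single job -> job slot
    assert resolve(None, ["a", "b"], None) == (None, None, ["a", "b"])  # many jobs -> jobs slot
    assert resolve(None, [], "named_job") == ("named_job", None, None)  # name-only -> job_name slot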
dagster/_core/execution/backfill.py

@@ -26,6 +26,7 @@ from dagster._core.remote_representation.external_data import job_name_for_parti
 from dagster._core.storage.dagster_run import (
     CANCELABLE_RUN_STATUSES,
     NOT_FINISHED_STATUSES,
+    DagsterRunStatus,
     RunsFilter,
 )
 from dagster._core.storage.tags import BACKFILL_ID_TAG, USER_TAG
@@ -557,9 +558,11 @@ def cancel_backfill_runs_and_cancellation_complete(
 
     while True:
         # Cancel all cancelable runs for the backfill in batches
+
+        # start with the queued runs since those will be faster to cancel
         runs_to_cancel_in_iteration = instance.run_storage.get_runs(
             filters=RunsFilter(
-                statuses=CANCELABLE_RUN_STATUSES,
+                statuses=[DagsterRunStatus.QUEUED],
                 tags={
                     BACKFILL_ID_TAG: backfill_id,
                 },
@@ -567,16 +570,38 @@
             limit=CANCELABLE_RUNS_BATCH_SIZE,
             ascending=True,
         )
+
         if not runs_to_cancel_in_iteration:
-            break
+            # once all queued runs are canceled, cancel all other cancelable runs
+            runs_to_cancel_in_iteration = instance.run_storage.get_runs(
+                filters=RunsFilter(
+                    statuses=CANCELABLE_RUN_STATUSES,
+                    tags={
+                        BACKFILL_ID_TAG: backfill_id,
+                    },
+                ),
+                limit=CANCELABLE_RUNS_BATCH_SIZE,
+                ascending=True,
+            )
+            if not runs_to_cancel_in_iteration:
+                break
 
         canceled_any_runs = True
        for run in runs_to_cancel_in_iteration:
            run_id = run.run_id
            logger.info(f"Terminating submitted run {run_id}")
-            # calling cancel_run will immediately set its status to CANCELING or CANCELED,
+
+            # in both cases this will synchronously set its status to CANCELING or CANCELED,
             # ensuring that it will not be returned in the next loop
-            instance.run_coordinator.cancel_run(run_id)
+
+            if run.status == DagsterRunStatus.QUEUED:
+                instance.report_run_canceling(
+                    run,
+                    message="Canceling run from the queue.",
+                )
+                instance.report_run_canceled(run)
+            else:
+                instance.run_launcher.terminate(run_id)
 
         if canceled_any_runs:
             # since we are canceling some runs in this iteration, we know that there is more work to do.
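
The rewritten loop drains cheap-to-cancel queued runs before terminating in-flight ones, and reports queued runs as canceled directly rather than going through the run launcher. Distilled into a generic sketch (function names are placeholders, not Dagster APIs):

    def cancel_backfill_runs(fetch_queued, fetch_cancelable, cancel_queued, terminate, batch_size=25):
        """Cancel queued runs first (cheap), then fall back to terminating the rest."""
        while True:
            batch = fetch_queued(batch_size) or fetch_cancelable(batch_size)
            if not batch:
                return
            for run in batch:
                # each path must synchronously move the run out of a cancelable
                # status, or the same run would be fetched again next iteration
                if run.is_queued:
                    cancel_queued(run)
                else:
                    terminate(run)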
dagster/_core/execution/context/output.py

@@ -236,8 +236,8 @@ class OutputContext:
         """A dict of the metadata that is assigned to the output at execution time."""
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.output_metadata."
-                "Output metadata is not available when accessed from the InputContext."
+                "You are using InputContext.upstream_output.output_metadata. "
+                "Output metadata is not available when accessed from the InputContext. "
                 "https://github.com/dagster-io/dagster/issues/20094"
             )
         return {}
@@ -369,9 +369,9 @@
     def step_context(self) -> "StepExecutionContext":
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.step_context"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.step_context. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )
 
@@ -389,9 +389,9 @@
         """Whether the current run is a partitioned run."""
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.has_partition_key"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.has_partition_key. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )
 
@@ -406,9 +406,9 @@
         """
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.partition_key"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.partition_key. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )
 
@@ -425,9 +425,9 @@
         """Returns True if the asset being stored is partitioned."""
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.has_asset_partitions"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.has_asset_partitions. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )
 
@@ -446,9 +446,9 @@
         """
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.asset_partition_key"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.asset_partition_key. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )
 
@@ -463,9 +463,9 @@
         """
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.asset_partition_key_range"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.asset_partition_key_range. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
            )
 
@@ -480,9 +480,9 @@
         """
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.asset_partition_keys"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.asset_partition_keys. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
            )
 
@@ -503,9 +503,9 @@
         """
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.asset_partitions_time_window"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.asset_partitions_time_window. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
            )
 
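All of these changes fix the same pitfall: Python implicitly concatenates adjacent string literals with no separator, so the multi-line warning messages were missing the spaces between sentences. For example:

    message = (
        "You are using InputContext.upstream_output.step_context"
        "This use on upstream_output is deprecated and will fail in the future"
    )
    # -> "...step_contextThis use on upstream_output is deprecated..."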
dagster/_core/execution/plan/objects.py

@@ -128,7 +128,9 @@ class StepFailureData(
                 "User code error is missing cause. User code errors are expected to have a"
                 " causes, which are the errors thrown from user code.",
             )
-            return self.error.message.strip() + ":\n\n" + user_code_error.to_string()
+            return (
+                self.error.message.strip() + ":\n\n" + check.not_none(user_code_error).to_string()
+            )
         else:
             return self.error.to_string()
 
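check.not_none both raises if the value is None and narrows the Optional type for pyright, which the bare attribute access after check.invariant did not. A minimal sketch of the pattern with Dagster's check module (function name is a placeholder):

    from typing import Optional

    import dagster._check as check

    def describe(error_message: Optional[str]) -> str:
        # raises a CheckError if None; otherwise pyright narrows the type to str
        return check.not_none(error_message).upper()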
dagster/_core/remote_representation/code_location.py

@@ -18,11 +18,7 @@ from dagster._core.definitions.reconstruct import ReconstructableJob, Reconstruc
 from dagster._core.definitions.repository_definition import RepositoryDefinition
 from dagster._core.definitions.selector import JobSubsetSelector
 from dagster._core.definitions.timestamp import TimestampWithTimezone
-from dagster._core.errors import (
-    DagsterInvalidSubsetError,
-    DagsterInvariantViolationError,
-    DagsterUserCodeProcessError,
-)
+from dagster._core.errors import DagsterInvalidSubsetError, DagsterUserCodeProcessError
 from dagster._core.execution.api import create_execution_plan
 from dagster._core.execution.plan.state import KnownExecutionState
 from dagster._core.instance import DagsterInstance
@@ -318,14 +314,16 @@ class CodeLocation(AbstractContextManager):
 
     @property
     @abstractmethod
-    def repository_code_pointer_dict(self) -> Mapping[str, CodePointer]:
+    def repository_code_pointer_dict(self) -> Mapping[str, Optional[CodePointer]]:
         pass
 
-    def get_repository_python_origin(self, repository_name: str) -> "RepositoryPythonOrigin":
-        if repository_name not in self.repository_code_pointer_dict:
-            raise DagsterInvariantViolationError(f"Unable to find repository {repository_name}.")
+    def get_repository_python_origin(
+        self, repository_name: str
+    ) -> Optional["RepositoryPythonOrigin"]:
+        code_pointer = self.repository_code_pointer_dict.get(repository_name)
+        if not code_pointer:
+            return None
 
-        code_pointer = self.repository_code_pointer_dict[repository_name]
         return RepositoryPythonOrigin(
             executable_path=self.executable_path or sys.executable,
             code_pointer=code_pointer,
@@ -414,7 +412,7 @@ class InProcessCodeLocation(CodeLocation):
         return self._origin.entry_point
 
     @property
-    def repository_code_pointer_dict(self) -> Mapping[str, CodePointer]:
+    def repository_code_pointer_dict(self) -> Mapping[str, Optional[CodePointer]]:
         return self._repository_code_pointer_dict
 
     def _get_reconstructable_repository(self, repository_name: str) -> ReconstructableRepository:
@@ -801,8 +799,8 @@ class GrpcServerCodeLocation(CodeLocation):
         return self._container_context
 
     @property
-    def repository_code_pointer_dict(self) -> Mapping[str, CodePointer]:
-        return cast("Mapping[str, CodePointer]", self._repository_code_pointer_dict)
+    def repository_code_pointer_dict(self) -> Mapping[str, Optional[CodePointer]]:
+        return cast("Mapping[str, Optional[CodePointer]]", self._repository_code_pointer_dict)
 
     @property
     def executable_path(self) -> Optional[str]:
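
Because get_repository_python_origin now returns None instead of raising when a repository has no code pointer, callers need an explicit guard. A hedged sketch of the new calling convention (helper name is hypothetical):

    from typing import Optional

    def origin_id_or_none(code_location, repository_name: str) -> Optional[str]:
        # returns None when the repository has no code pointer in this location,
        # where the old version raised DagsterInvariantViolationError
        origin = code_location.get_repository_python_origin(repository_name)
        return origin.get_id() if origin is not None else None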