dagster 1.12.2__py3-none-any.whl → 1.12.3__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (34)
  1. dagster/_core/definitions/assets/graph/remote_asset_graph.py +14 -11
  2. dagster/_core/definitions/declarative_automation/automation_condition.py +40 -6
  3. dagster/_core/definitions/declarative_automation/automation_context.py +8 -2
  4. dagster/_core/definitions/declarative_automation/legacy/legacy_context.py +10 -4
  5. dagster/_core/definitions/declarative_automation/legacy/rule_condition.py +8 -2
  6. dagster/_core/definitions/declarative_automation/operators/check_operators.py +18 -4
  7. dagster/_core/definitions/declarative_automation/operators/dep_operators.py +18 -4
  8. dagster/_core/definitions/declarative_automation/operators/newly_true_operator.py +27 -1
  9. dagster/_core/definitions/declarative_automation/operators/since_operator.py +27 -1
  10. dagster/_core/definitions/selector.py +4 -0
  11. dagster/_core/execution/backfill.py +29 -4
  12. dagster/_core/execution/context/output.py +26 -26
  13. dagster/_core/workspace/context.py +7 -3
  14. dagster/_daemon/asset_daemon.py +14 -2
  15. dagster/_daemon/sensor.py +11 -3
  16. dagster/components/component/component.py +21 -2
  17. dagster/components/core/component_tree.py +41 -28
  18. dagster/components/core/context.py +50 -15
  19. dagster/components/lib/definitions_component/__init__.py +2 -0
  20. dagster/components/lib/executable_component/function_component.py +26 -23
  21. dagster/components/lib/executable_component/python_script_component.py +2 -0
  22. dagster/components/lib/executable_component/uv_run_component.py +2 -0
  23. dagster/components/lib/sql_component/sql_component.py +1 -0
  24. dagster/components/list/list.py +1 -1
  25. dagster/components/resolved/context.py +15 -36
  26. dagster/components/resolved/scopes.py +161 -0
  27. dagster/components/testing/utils.py +2 -2
  28. dagster/version.py +1 -1
  29. {dagster-1.12.2.dist-info → dagster-1.12.3.dist-info}/METADATA +3 -3
  30. {dagster-1.12.2.dist-info → dagster-1.12.3.dist-info}/RECORD +34 -33
  31. {dagster-1.12.2.dist-info → dagster-1.12.3.dist-info}/WHEEL +0 -0
  32. {dagster-1.12.2.dist-info → dagster-1.12.3.dist-info}/entry_points.txt +0 -0
  33. {dagster-1.12.2.dist-info → dagster-1.12.3.dist-info}/licenses/LICENSE +0 -0
  34. {dagster-1.12.2.dist-info → dagster-1.12.3.dist-info}/top_level.txt +0 -0
dagster/_core/definitions/assets/graph/remote_asset_graph.py
@@ -40,9 +40,10 @@ from dagster._core.definitions.freshness_policy import LegacyFreshnessPolicy
 from dagster._core.definitions.metadata import ArbitraryMetadataMapping
 from dagster._core.definitions.partitions.definition import PartitionsDefinition
 from dagster._core.definitions.partitions.mapping import PartitionMapping
+from dagster._core.definitions.selector import ScheduleSelector, SensorSelector
 from dagster._core.definitions.utils import DEFAULT_GROUP_NAME
 from dagster._core.remote_representation.external import RemoteRepository
-from dagster._core.remote_representation.handle import InstigatorHandle, RepositoryHandle
+from dagster._core.remote_representation.handle import RepositoryHandle
 from dagster._core.workspace.workspace import CurrentWorkspace
 from dagster._record import ImportFrom, record
 from dagster._utils.cached_method import cached_method
@@ -395,31 +396,33 @@ class RemoteWorkspaceAssetNode(RemoteAssetNode):
             )
         )

-    def get_targeting_schedule_handles(
+    def get_targeting_schedule_selectors(
         self,
-    ) -> Sequence[InstigatorHandle]:
+    ) -> Sequence[ScheduleSelector]:
         selectors = []
         for node in self.repo_scoped_asset_infos:
             for schedule_name in node.targeting_schedule_names:
                 selectors.append(
-                    InstigatorHandle(
-                        repository_handle=node.handle,
-                        instigator_name=schedule_name,
+                    ScheduleSelector(
+                        location_name=node.handle.location_name,
+                        repository_name=node.handle.repository_name,
+                        schedule_name=schedule_name,
                     )
                 )

         return selectors

-    def get_targeting_sensor_handles(
+    def get_targeting_sensor_selectors(
         self,
-    ) -> Sequence[InstigatorHandle]:
+    ) -> Sequence[SensorSelector]:
         selectors = []
         for node in self.repo_scoped_asset_infos:
             for sensor_name in node.targeting_sensor_names:
                 selectors.append(
-                    InstigatorHandle(
-                        repository_handle=node.handle,
-                        instigator_name=sensor_name,
+                    SensorSelector(
+                        location_name=node.handle.location_name,
+                        repository_name=node.handle.repository_name,
+                        sensor_name=sensor_name,
                     )
                 )
         return selectors
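
Note: the two methods above now return plain, serializable ScheduleSelector / SensorSelector values instead of InstigatorHandle objects, flattening the repository handle into its identifying fields. A minimal sketch of the conversion, assuming a handle object that exposes location_name and repository_name as the diff shows (the helper function itself is illustrative, not part of the change):

    from dagster._core.definitions.selector import ScheduleSelector

    def to_schedule_selector(handle, schedule_name: str) -> ScheduleSelector:
        # Flatten the repository handle into the selector's identifying
        # fields, mirroring the new code path above.
        return ScheduleSelector(
            location_name=handle.location_name,
            repository_name=handle.repository_name,
            schedule_name=schedule_name,
        )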

dagster/_core/definitions/declarative_automation/automation_condition.py
@@ -15,6 +15,7 @@ from dagster._core.definitions.asset_key import (
     AssetCheckKey,
     AssetKey,
     CoercibleToAssetKey,
+    EntityKey,
     T_EntityKey,
 )
 from dagster._core.definitions.declarative_automation.serialized_objects import (
@@ -136,7 +137,9 @@ class AutomationCondition(ABC, Generic[T_EntityKey]):
         self, *, parent_unique_id: Optional[str] = None, index: Optional[int] = None
     ) -> AutomationConditionSnapshot:
         """Returns a serializable snapshot of the entire AutomationCondition tree."""
-        unique_id = self.get_node_unique_id(parent_unique_id=parent_unique_id, index=index)
+        unique_id = self.get_node_unique_id(
+            parent_unique_id=parent_unique_id, index=index, target_key=None
+        )
         node_snapshot = self.get_node_snapshot(unique_id)
         children = [
             child.get_snapshot(parent_unique_id=unique_id, index=i)
@@ -144,12 +147,22 @@ class AutomationCondition(ABC, Generic[T_EntityKey]):
         ]
         return AutomationConditionSnapshot(node_snapshot=node_snapshot, children=children)

-    def get_node_unique_id(self, *, parent_unique_id: Optional[str], index: Optional[int]) -> str:
+    def get_node_unique_id(
+        self,
+        *,
+        parent_unique_id: Optional[str],
+        index: Optional[int],
+        target_key: Optional[EntityKey],
+    ) -> str:
         """Returns a unique identifier for this condition within the broader condition tree."""
         return non_secure_md5_hash_str(f"{parent_unique_id}{index}{self.name}".encode())

     def get_backcompat_node_unique_ids(
-        self, *, parent_unique_id: Optional[str] = None, index: Optional[int] = None
+        self,
+        *,
+        parent_unique_id: Optional[str] = None,
+        index: Optional[int] = None,
+        target_key: Optional[EntityKey] = None,
     ) -> Sequence[str]:
         """Used for backwards compatibility when condition unique id logic changes."""
         return []
@@ -159,6 +172,7 @@ class AutomationCondition(ABC, Generic[T_EntityKey]):
         *,
         parent_unique_ids: Sequence[Optional[str]],
         child_indices: Sequence[Optional[int]],
+        target_key: Optional[EntityKey],
     ) -> Sequence[str]:
         unique_ids = []
         for parent_unique_id in parent_unique_ids:
@@ -166,10 +180,14 @@ class AutomationCondition(ABC, Generic[T_EntityKey]):
             unique_ids.extend(
                 [
                     self.get_node_unique_id(
-                        parent_unique_id=parent_unique_id, index=child_index
+                        parent_unique_id=parent_unique_id,
+                        index=child_index,
+                        target_key=target_key,
                     ),
                     *self.get_backcompat_node_unique_ids(
-                        parent_unique_id=parent_unique_id, index=child_index
+                        parent_unique_id=parent_unique_id,
+                        index=child_index,
+                        target_key=target_key,
                     ),
                 ]
             )
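
Note: get_node_unique_ids now fans out across every (parent id, child index) pair, yielding the current unique id followed by any backcompat ids. A consumer can then probe stored cursor state with each candidate in turn, so state written under an older hashing scheme is still found. A sketch of that lookup, under the assumption that cursors live in a dict keyed by unique id (as node_cursors_by_unique_id in the legacy_context hunk below suggests; the helper itself is illustrative):

    from typing import Optional, Sequence

    def find_node_cursor(
        node_cursors_by_unique_id: dict[str, object],
        candidate_unique_ids: Sequence[str],
    ) -> Optional[object]:
        # Probe the current id first, then ids produced by older schemes.
        for unique_id in candidate_unique_ids:
            if unique_id in node_cursors_by_unique_id:
                return node_cursors_by_unique_id[unique_id]
        return None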
@@ -180,7 +198,7 @@ class AutomationCondition(ABC, Generic[T_EntityKey]):
     ) -> str:
         """Returns a unique identifier for the entire subtree."""
         node_unique_id = self.get_node_unique_id(
-            parent_unique_id=parent_node_unique_id, index=index
+            parent_unique_id=parent_node_unique_id, index=index, target_key=None
         )
         child_unique_ids = [
             child.get_unique_id(parent_node_unique_id=node_unique_id, index=i)
@@ -845,6 +863,22 @@ class BuiltinAutomationCondition(AutomationCondition[T_EntityKey]):
         """Returns a copy of this AutomationCondition with a human-readable label."""
         return copy(self, label=label)

+    def _get_stable_unique_id(self, target_key: Optional[EntityKey]) -> str:
+        """Returns an identifier that is stable regardless of where it exists in the broader condition tree.
+        This should only be used for conditions that don't change their output based on what conditions are
+        evaluated before them (i.e. they explicitly set their candidate subset to the entire subset).
+        """
+        child_ids = [
+            child.get_node_unique_id(
+                parent_unique_id=None,
+                index=i,
+                target_key=target_key,
+            )
+            for i, child in enumerate(self.children)
+        ]
+        parts = [self.name, *child_ids, target_key.to_user_string() if target_key else ""]
+        return non_secure_md5_hash_str("".join(parts).encode())
+

 @public
 @hidden_param(param="subsets_with_metadata", breaking_version="", emit_runtime_warning=False)
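
Note: _get_stable_unique_id hashes only the condition's name, its children's ids, and the target key, dropping the parent id and index that make the default scheme position-dependent. A self-contained sketch of the difference between the two schemes (hashlib stands in for dagster's internal non_secure_md5_hash_str helper; all variable names are illustrative):

    import hashlib
    from typing import Optional, Sequence

    def md5_str(data: bytes) -> str:
        # stand-in for dagster's non_secure_md5_hash_str
        return hashlib.md5(data).hexdigest()

    def positional_id(parent_unique_id: Optional[str], index: Optional[int], name: str) -> str:
        # default scheme: moving the node changes parent/index, so the id changes
        return md5_str(f"{parent_unique_id}{index}{name}".encode())

    def stable_id(name: str, child_ids: Sequence[str], target_key_str: str) -> str:
        # stable scheme: only the node, its subtree, and the target key matter
        return md5_str("".join([name, *child_ids, target_key_str]).encode())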

dagster/_core/definitions/declarative_automation/automation_context.py
@@ -73,7 +73,9 @@ class AutomationContext(Generic[T_EntityKey]):
         condition = check.not_none(
             evaluator.asset_graph.get(key).automation_condition or evaluator.default_condition
         )
-        unique_ids = condition.get_node_unique_ids(parent_unique_ids=[None], child_indices=[None])
+        unique_ids = condition.get_node_unique_ids(
+            parent_unique_ids=[None], child_indices=[None], target_key=None
+        )

         return AutomationContext(
             condition=condition,
@@ -101,7 +103,11 @@ class AutomationContext(Generic[T_EntityKey]):
         check.invariant(len(child_indices) > 0, "Must be at least one child index")

         unique_ids = child_condition.get_node_unique_ids(
-            parent_unique_ids=self.condition_unique_ids, child_indices=child_indices
+            parent_unique_ids=self.condition_unique_ids,
+            child_indices=child_indices,
+            target_key=candidate_subset.key
+            if candidate_subset.key != self.root_context.key
+            else None,
         )
         return AutomationContext(
             condition=child_condition,

dagster/_core/definitions/declarative_automation/legacy/legacy_context.py
@@ -88,7 +88,7 @@ class LegacyRuleEvaluationContext:
             condition=condition,
             cursor=cursor,
             node_cursor=cursor.node_cursors_by_unique_id.get(
-                condition.get_node_unique_id(parent_unique_id=None, index=0)
+                condition.get_node_unique_id(parent_unique_id=None, index=0, target_key=None)
             )
             if cursor
             else None,
@@ -224,17 +224,23 @@ class LegacyRuleEvaluationContext:
         # Or(MaterializeCond, Not(SkipCond), Not(DiscardCond))
         if len(self.condition.children) != 3:
             return None
-        unique_id = self.condition.get_node_unique_id(parent_unique_id=None, index=None)
+        unique_id = self.condition.get_node_unique_id(
+            parent_unique_id=None, index=None, target_key=None
+        )

         # get Not(DiscardCond)
         not_discard_condition = self.condition.children[2]
-        unique_id = not_discard_condition.get_node_unique_id(parent_unique_id=unique_id, index=2)
+        unique_id = not_discard_condition.get_node_unique_id(
+            parent_unique_id=unique_id, index=2, target_key=None
+        )
         if not isinstance(not_discard_condition, NotAutomationCondition):
             return None

         # get DiscardCond
         discard_condition = not_discard_condition.children[0]
-        unique_id = discard_condition.get_node_unique_id(parent_unique_id=unique_id, index=0)
+        unique_id = discard_condition.get_node_unique_id(
+            parent_unique_id=unique_id, index=0, target_key=None
+        )
         if not isinstance(discard_condition, RuleCondition) or not isinstance(
             discard_condition.rule, DiscardOnMaxMaterializationsExceededRule
         ):

dagster/_core/definitions/declarative_automation/legacy/rule_condition.py
@@ -2,7 +2,7 @@ from typing import TYPE_CHECKING, Optional

 from dagster_shared.serdes import whitelist_for_serdes

-from dagster._core.definitions.asset_key import AssetKey
+from dagster._core.definitions.asset_key import AssetKey, EntityKey
 from dagster._core.definitions.auto_materialize_rule import AutoMaterializeRule
 from dagster._core.definitions.declarative_automation.automation_condition import (
     AutomationResult,
@@ -24,7 +24,13 @@ class RuleCondition(BuiltinAutomationCondition[AssetKey]):

     rule: AutoMaterializeRule

-    def get_node_unique_id(self, *, parent_unique_id: Optional[str], index: Optional[int]) -> str:
+    def get_node_unique_id(
+        self,
+        *,
+        parent_unique_id: Optional[str],
+        index: Optional[int],
+        target_key: Optional[EntityKey],
+    ) -> str:
         # preserves old (bad) behavior of not including the parent_unique_id to avoid invalidating
         # old serialized information
         parts = [self.rule.__class__.__name__, self.description]

dagster/_core/definitions/declarative_automation/operators/check_operators.py
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING, AbstractSet, Any, Optional, Sequence  # noqa:
 from dagster_shared.serdes import whitelist_for_serdes

 import dagster._check as check
-from dagster._core.definitions.asset_key import AssetCheckKey, AssetKey
+from dagster._core.definitions.asset_key import AssetCheckKey, AssetKey, EntityKey
 from dagster._core.definitions.assets.graph.base_asset_graph import BaseAssetGraph, BaseAssetNode
 from dagster._core.definitions.declarative_automation.automation_condition import (
     AutomationCondition,
@@ -56,16 +56,30 @@ class ChecksAutomationCondition(BuiltinAutomationCondition[AssetKey]):
     def requires_cursor(self) -> bool:
         return False

-    def get_node_unique_id(self, *, parent_unique_id: Optional[str], index: Optional[int]) -> str:
+    def get_node_unique_id(
+        self,
+        *,
+        parent_unique_id: Optional[str],
+        index: Optional[int],
+        target_key: Optional[EntityKey],
+    ) -> str:
         """Ignore allow_selection / ignore_selection for the cursor hash."""
         parts = [str(parent_unique_id), str(index), self.base_name]
         return non_secure_md5_hash_str("".join(parts).encode())

     def get_backcompat_node_unique_ids(
-        self, *, parent_unique_id: Optional[str] = None, index: Optional[int] = None
+        self,
+        *,
+        parent_unique_id: Optional[str] = None,
+        index: Optional[int] = None,
+        target_key: Optional[EntityKey] = None,
     ) -> Sequence[str]:
         # backcompat for previous cursors where the allow/ignore selection influenced the hash
-        return [super().get_node_unique_id(parent_unique_id=parent_unique_id, index=index)]
+        return [
+            super().get_node_unique_id(
+                parent_unique_id=parent_unique_id, index=index, target_key=target_key
+            )
+        ]

     def allow(self, selection: "AssetSelection") -> "ChecksAutomationCondition":
         """Returns a copy of this condition that will only consider dependencies within the provided

dagster/_core/definitions/declarative_automation/operators/dep_operators.py
@@ -8,7 +8,7 @@ from typing_extensions import Self
 import dagster._check as check
 from dagster._annotations import public
 from dagster._core.asset_graph_view.asset_graph_view import U_EntityKey
-from dagster._core.definitions.asset_key import AssetKey, T_EntityKey
+from dagster._core.definitions.asset_key import AssetKey, EntityKey, T_EntityKey
 from dagster._core.definitions.assets.graph.base_asset_graph import BaseAssetGraph, BaseAssetNode
 from dagster._core.definitions.declarative_automation.automation_condition import (
     AutomationCondition,
@@ -123,16 +123,30 @@ class DepsAutomationCondition(BuiltinAutomationCondition[T_EntityKey]):
     def requires_cursor(self) -> bool:
         return False

-    def get_node_unique_id(self, *, parent_unique_id: Optional[str], index: Optional[int]) -> str:
+    def get_node_unique_id(
+        self,
+        *,
+        parent_unique_id: Optional[str],
+        index: Optional[int],
+        target_key: Optional[EntityKey],
+    ) -> str:
         """Ignore allow_selection / ignore_selection for the cursor hash."""
         parts = [str(parent_unique_id), str(index), self.base_name]
         return non_secure_md5_hash_str("".join(parts).encode())

     def get_backcompat_node_unique_ids(
-        self, *, parent_unique_id: Optional[str] = None, index: Optional[int] = None
+        self,
+        *,
+        parent_unique_id: Optional[str] = None,
+        index: Optional[int] = None,
+        target_key: Optional[EntityKey] = None,
     ) -> Sequence[str]:
         # backcompat for previous cursors where the allow/ignore selection influenced the hash
-        return [super().get_node_unique_id(parent_unique_id=parent_unique_id, index=index)]
+        return [
+            super().get_node_unique_id(
+                parent_unique_id=parent_unique_id, index=index, target_key=target_key
+            )
+        ]

     @public
     def allow(self, selection: "AssetSelection") -> "DepsAutomationCondition":

dagster/_core/definitions/declarative_automation/operators/newly_true_operator.py
@@ -5,7 +5,7 @@ from dagster_shared.serdes import whitelist_for_serdes

 from dagster._core.asset_graph_view.entity_subset import EntitySubset
 from dagster._core.asset_graph_view.serializable_entity_subset import SerializableEntitySubset
-from dagster._core.definitions.asset_key import T_EntityKey
+from dagster._core.definitions.asset_key import EntityKey, T_EntityKey
 from dagster._core.definitions.declarative_automation.automation_condition import (
     AutomationCondition,
     AutomationResult,
@@ -39,6 +39,32 @@ class NewlyTrueCondition(BuiltinAutomationCondition[T_EntityKey]):
             return None
         return context.asset_graph_view.get_subset_from_serializable_subset(true_subset)

+    def get_node_unique_id(
+        self,
+        *,
+        parent_unique_id: Optional[str],
+        index: Optional[int],
+        target_key: Optional[EntityKey],
+    ) -> str:
+        # newly true conditions should have stable cursoring logic regardless of where they
+        # exist in the broader condition tree, as they're always evaluated over the entire
+        # subset
+        return self._get_stable_unique_id(target_key)
+
+    def get_backcompat_node_unique_ids(
+        self,
+        *,
+        parent_unique_id: Optional[str] = None,
+        index: Optional[int] = None,
+        target_key: Optional[EntityKey] = None,
+    ) -> Sequence[str]:
+        return [
+            # get the standard globally-aware unique id for backcompat purposes
+            super().get_node_unique_id(
+                parent_unique_id=parent_unique_id, index=index, target_key=target_key
+            )
+        ]
+
     async def evaluate(self, context: AutomationContext) -> AutomationResult:  # pyright: ignore[reportIncompatibleMethodOverride]
         # evaluate child condition
         child_result = await context.for_child_condition(

dagster/_core/definitions/declarative_automation/operators/since_operator.py
@@ -8,7 +8,7 @@ from typing_extensions import Self

 import dagster._check as check
 from dagster._annotations import public
-from dagster._core.definitions.asset_key import T_EntityKey
+from dagster._core.definitions.asset_key import EntityKey, T_EntityKey
 from dagster._core.definitions.declarative_automation.automation_condition import (
     AutomationCondition,
     AutomationResult,
@@ -93,6 +93,32 @@ class SinceCondition(BuiltinAutomationCondition[T_EntityKey]):
     def children(self) -> Sequence[AutomationCondition[T_EntityKey]]:
         return [self.trigger_condition, self.reset_condition]

+    def get_node_unique_id(
+        self,
+        *,
+        parent_unique_id: Optional[str],
+        index: Optional[int],
+        target_key: Optional[EntityKey],
+    ) -> str:
+        # since conditions should have stable cursoring logic regardless of where they
+        # exist in the broader condition tree, as they're always evaluated over the entire
+        # subset
+        return self._get_stable_unique_id(target_key)
+
+    def get_backcompat_node_unique_ids(
+        self,
+        *,
+        parent_unique_id: Optional[str] = None,
+        index: Optional[int] = None,
+        target_key: Optional[EntityKey] = None,
+    ) -> Sequence[str]:
+        return [
+            # get the standard globally-aware unique id for backcompat purposes
+            super().get_node_unique_id(
+                parent_unique_id=parent_unique_id, index=index, target_key=target_key
+            )
+        ]
+
     async def evaluate(  # pyright: ignore[reportIncompatibleMethodOverride]
         self, context: AutomationContext[T_EntityKey]
     ) -> AutomationResult[T_EntityKey]:
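
Note: NewlyTrueCondition and SinceCondition both persist cursor state between evaluations, so a position-dependent id would silently discard that state whenever the surrounding condition tree is restructured. Both now use the stable scheme and keep the old positional id as a backcompat fallback, so existing cursors are migrated rather than reset. A toy illustration of the failure mode the stable id avoids (all values made up):

    cursor_state = {"old-positional-id": "serialized_since_cursor"}

    # wrapping the condition in a new parent changes the positional id
    print(cursor_state.get("new-positional-id"))  # None: state silently reset

    # the stable id is unchanged by the refactor, so the lookup still succeeds
    print(cursor_state.get("old-positional-id"))  # "serialized_since_cursor"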

dagster/_core/definitions/selector.py
@@ -315,6 +315,10 @@ class InstigatorSelector:
     def get_id(self) -> str:
         return create_snapshot_id(self)

+    @property
+    def instigator_name(self) -> str:
+        return self.name
+

 @record
 class GraphSelector:
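
Note: this property gives InstigatorSelector the same duck-typed surface that get_sensor / get_schedule (see the workspace-context hunks below) read off any selector they accept: location_name, repository_name, and instigator_name. A sketch of that implied interface as a structural type (the Protocol itself is illustrative; the attribute names come from the call sites in this diff):

    from typing import Protocol

    class InstigatorSelectorLike(Protocol):
        location_name: str
        repository_name: str

        @property
        def instigator_name(self) -> str: ...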

dagster/_core/execution/backfill.py
@@ -26,6 +26,7 @@ from dagster._core.remote_representation.external_data import job_name_for_parti
 from dagster._core.storage.dagster_run import (
     CANCELABLE_RUN_STATUSES,
     NOT_FINISHED_STATUSES,
+    DagsterRunStatus,
     RunsFilter,
 )
 from dagster._core.storage.tags import BACKFILL_ID_TAG, USER_TAG
@@ -557,9 +558,11 @@ def cancel_backfill_runs_and_cancellation_complete(

     while True:
         # Cancel all cancelable runs for the backfill in batches
+
+        # start with the queued runs since those will be faster to cancel
         runs_to_cancel_in_iteration = instance.run_storage.get_runs(
             filters=RunsFilter(
-                statuses=CANCELABLE_RUN_STATUSES,
+                statuses=[DagsterRunStatus.QUEUED],
                 tags={
                     BACKFILL_ID_TAG: backfill_id,
                 },
@@ -567,16 +570,38 @@ def cancel_backfill_runs_and_cancellation_complete(
             limit=CANCELABLE_RUNS_BATCH_SIZE,
             ascending=True,
         )
+
         if not runs_to_cancel_in_iteration:
-            break
+            # once all queued runs are canceled, cancel all other cancelable runs
+            runs_to_cancel_in_iteration = instance.run_storage.get_runs(
+                filters=RunsFilter(
+                    statuses=CANCELABLE_RUN_STATUSES,
+                    tags={
+                        BACKFILL_ID_TAG: backfill_id,
+                    },
+                ),
+                limit=CANCELABLE_RUNS_BATCH_SIZE,
+                ascending=True,
+            )
+            if not runs_to_cancel_in_iteration:
+                break

         canceled_any_runs = True
         for run in runs_to_cancel_in_iteration:
             run_id = run.run_id
             logger.info(f"Terminating submitted run {run_id}")
-            # calling cancel_run will immediately set its status to CANCELING or CANCELED,
+
+            # in both cases this will synchronously set its status to CANCELING or CANCELED,
             # ensuring that it will not be returned in the next loop
-            instance.run_coordinator.cancel_run(run_id)
+
+            if run.status == DagsterRunStatus.QUEUED:
+                instance.report_run_canceling(
+                    run,
+                    message="Canceling run from the queue.",
+                )
+                instance.report_run_canceled(run)
+            else:
+                instance.run_launcher.terminate(run_id)

         if canceled_any_runs:
             # since we are canceling some runs in this iteration, we know that there is more work to do.
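
Note: backfill run cancellation is now two-phase. Queued runs are drained first, since they never launched and can be canceled by simply recording the canceling/canceled events; only once none remain does the loop fall back to the broader cancelable statuses, which require a trip through the run launcher. A condensed sketch of the control flow, with the RunsFilter/batching details folded into a hypothetical fetch_runs callable:

    from dagster._core.storage.dagster_run import CANCELABLE_RUN_STATUSES, DagsterRunStatus

    def cancel_backfill_runs(instance, fetch_runs) -> None:
        # fetch_runs(statuses) stands in for the batched get_runs call above
        while True:
            runs = fetch_runs([DagsterRunStatus.QUEUED])
            if not runs:
                # queued runs drained; move on to everything else cancelable
                runs = fetch_runs(CANCELABLE_RUN_STATUSES)
                if not runs:
                    return
            for run in runs:
                if run.status == DagsterRunStatus.QUEUED:
                    # never launched: just record the cancellation events
                    instance.report_run_canceling(run, message="Canceling run from the queue.")
                    instance.report_run_canceled(run)
                else:
                    instance.run_launcher.terminate(run.run_id)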

dagster/_core/execution/context/output.py
@@ -236,8 +236,8 @@ class OutputContext:
         """A dict of the metadata that is assigned to the output at execution time."""
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.output_metadata."
-                "Output metadata is not available when accessed from the InputContext."
+                "You are using InputContext.upstream_output.output_metadata. "
+                "Output metadata is not available when accessed from the InputContext. "
                 "https://github.com/dagster-io/dagster/issues/20094"
             )
         return {}
@@ -369,9 +369,9 @@ class OutputContext:
     def step_context(self) -> "StepExecutionContext":
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.step_context"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.step_context. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )

@@ -389,9 +389,9 @@ class OutputContext:
         """Whether the current run is a partitioned run."""
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.has_partition_key"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.has_partition_key. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )

@@ -406,9 +406,9 @@ class OutputContext:
         """
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.partition_key"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.partition_key. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )

@@ -425,9 +425,9 @@ class OutputContext:
         """Returns True if the asset being stored is partitioned."""
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.has_asset_partitions"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.has_asset_partitions. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )

@@ -446,9 +446,9 @@ class OutputContext:
         """
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.asset_partition_key"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.asset_partition_key. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )

@@ -463,9 +463,9 @@ class OutputContext:
         """
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.asset_partition_key_range"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.asset_partition_key_range. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )

@@ -480,9 +480,9 @@ class OutputContext:
         """
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.asset_partition_keys"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.asset_partition_keys. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )

@@ -503,9 +503,9 @@ class OutputContext:
         """
         if self._warn_on_step_context_use:
             warnings.warn(
-                "You are using InputContext.upstream_output.asset_partitions_time_window"
-                "This use on upstream_output is deprecated and will fail in the future"
-                "Try to obtain what you need directly from InputContext"
+                "You are using InputContext.upstream_output.asset_partitions_time_window. "
+                "This use on upstream_output is deprecated and will fail in the future. "
+                "Try to obtain what you need directly from InputContext. "
                 "For more details: https://github.com/dagster-io/dagster/issues/7900"
             )

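Note: every hunk in this file fixes the same bug. Adjacent Python string literals are concatenated implicitly with no separator, so these multi-line warning messages ran their sentences together at runtime; the fix adds a trailing space to each fragment. A two-line demonstration of the pitfall:

    message = (
        "You are using InputContext.upstream_output.step_context"  # no trailing space
        "This use on upstream_output is deprecated and will fail in the future"
    )
    print(message)  # fragments fuse: "...step_contextThis use on upstream_output..."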

dagster/_core/workspace/context.py
@@ -26,6 +26,7 @@ from dagster._core.definitions.data_version import CachingStaleStatusResolver
 from dagster._core.definitions.partitions.context import partition_loading_context
 from dagster._core.definitions.partitions.definition import PartitionsDefinition
 from dagster._core.definitions.selector import (
+    InstigatorSelector,
     JobSelector,
     JobSubsetSelector,
     RepositorySelector,
@@ -69,7 +70,7 @@ from dagster._core.remote_representation.grpc_server_state_subscriber import (
     LocationStateChangeEventType,
     LocationStateSubscriber,
 )
-from dagster._core.remote_representation.handle import InstigatorHandle, RepositoryHandle
+from dagster._core.remote_representation.handle import RepositoryHandle
 from dagster._core.snap.dagster_types import DagsterTypeSnap
 from dagster._core.snap.mode import ResourceDefSnap
 from dagster._core.snap.node import GraphDefSnap, OpDefSnap
@@ -284,6 +285,9 @@ class BaseWorkspaceRequestContext(LoadingContext):
     def viewer_has_any_owner_definition_permissions(self) -> bool:
         return False

+    def read_partition_subsets_from_asset_health(self) -> bool:
+        return False
+
     def get_viewer_tags(self) -> dict[str, str]:
         return {}

@@ -596,7 +600,7 @@ class BaseWorkspaceRequestContext(LoadingContext):
         )

     def get_sensor(
-        self, selector: Union[InstigatorHandle, SensorSelector]
+        self, selector: Union[SensorSelector, InstigatorSelector]
     ) -> Optional[RemoteSensor]:
         if not self.has_code_location(selector.location_name):
             return None
@@ -613,7 +617,7 @@ class BaseWorkspaceRequestContext(LoadingContext):
         return repository.get_sensor(selector.instigator_name)

     def get_schedule(
-        self, selector: Union[InstigatorHandle, ScheduleSelector]
+        self, selector: Union[ScheduleSelector, InstigatorSelector]
     ) -> Optional[RemoteSchedule]:
         if not self.has_code_location(selector.location_name):
             return None
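
Note: get_sensor and get_schedule now accept an InstigatorSelector wherever an InstigatorHandle was previously allowed, which works because every accepted selector exposes the location_name, repository_name, and instigator_name attributes the lookup reads (the last one added to InstigatorSelector in the selector.py hunk above). A hedged usage sketch, with all location, repository, and sensor names as placeholders and field names taken from this diff:

    from dagster._core.definitions.selector import InstigatorSelector, SensorSelector

    by_sensor_selector = SensorSelector(
        location_name="my_location",
        repository_name="my_repo",
        sensor_name="my_sensor",
    )
    by_instigator_selector = InstigatorSelector(
        location_name="my_location",
        repository_name="my_repo",
        name="my_sensor",
    )
    # context.get_sensor(by_sensor_selector) and
    # context.get_sensor(by_instigator_selector) resolve the same RemoteSensor.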