dagster 1.12.10__py3-none-any.whl → 1.12.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dagster/_cli/asset.py CHANGED
@@ -43,7 +43,11 @@ def asset_cli():
 @click.option("--partition", help="Asset partition to target", required=False)
 @click.option(
     "--partition-range",
-    help="Asset partition range to target i.e. <start>...<end>",
+    help=(
+        "Asset partition range to materialize in the format <start>...<end>. "
+        "Requires all assets to have a BackfillPolicy.single_run() policy, which allows "
+        "the partition range to be executed in a single run. For example: 2025-01-01...2025-01-05"
+    ),
     required=False,
 )
 @click.option(
@@ -163,12 +167,18 @@ def execute_materialize_command(
     for asset_key in asset_keys:
         backfill_policy = implicit_job_def.asset_layer.get(asset_key).backfill_policy
         if (
-            backfill_policy is not None
-            and backfill_policy.policy_type != BackfillPolicyType.SINGLE_RUN
+            backfill_policy is None
+            or backfill_policy.policy_type != BackfillPolicyType.SINGLE_RUN
         ):
             check.failed(
-                "Provided partition range, but not all assets have a single-run backfill policy."
+                "Partition ranges with the CLI require all selected assets to have a "
+                "BackfillPolicy.single_run() policy. This allows the partition range to be "
+                "executed in a single run. Assets without this policy would require creating "
+                "a backfill with separate runs per partition, which needs a running daemon "
+                "process. Consider using the Dagster UI or a running daemon to execute "
+                "partition ranges for assets without a single-run backfill policy."
             )
+
     try:
         implicit_job_def.validate_partition_key(
             partition_range_start, selected_asset_keys=asset_keys, context=context
@@ -181,6 +191,7 @@ def execute_materialize_command(
             "All selected assets must have a PartitionsDefinition containing the passed"
             f" partition key `{partition_range_start}` or have no PartitionsDefinition."
         )
+
     tags = {
         ASSET_PARTITION_RANGE_START_TAG: partition_range_start,
         ASSET_PARTITION_RANGE_END_TAG: partition_range_end,
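
The two hunks above invert the CLI's partition-range guard: in 1.12.10 only an asset with an explicit non-single-run backfill policy was rejected, while in 1.12.12 an asset with no backfill policy at all is rejected too, and the error message now explains the daemon requirement. A minimal sketch of an asset that satisfies the new check, assuming illustrative asset and file names:

```python
# assets.py -- illustrative; the explicit single-run policy is what the new
# CLI check requires before it accepts --partition-range.
import dagster as dg


@dg.asset(
    partitions_def=dg.DailyPartitionsDefinition(start_date="2025-01-01"),
    backfill_policy=dg.BackfillPolicy.single_run(),
)
def daily_events() -> None: ...
```

With that policy in place, `dagster asset materialize -f assets.py --select daily_events --partition-range 2025-01-01...2025-01-05` executes the whole range as one run rather than one run per partition.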
dagster/_cli/job.py CHANGED
@@ -405,11 +405,16 @@ def execute_execute_command(
     for asset_key in job_def.asset_layer.executable_asset_keys:
         backfill_policy = job_def.asset_layer.get(asset_key).backfill_policy
         if (
-            backfill_policy is not None
-            and backfill_policy.policy_type != BackfillPolicyType.SINGLE_RUN
+            backfill_policy is None
+            or backfill_policy.policy_type != BackfillPolicyType.SINGLE_RUN
         ):
             check.failed(
-                "Provided partition range, but not all assets have a single-run backfill policy."
+                "Partition ranges with the CLI require all selected assets to have a "
+                "BackfillPolicy.single_run() policy. This allows the partition range to be "
+                "executed in a single run. Assets without this policy would require creating "
+                "a backfill with separate runs per partition, which needs a running daemon "
+                "process. Consider using the Dagster UI or a running daemon to execute "
+                "partition ranges for assets without a single-run backfill policy."
             )
     try:
         job_def.validate_partition_key(
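
`dagster job execute` gets the same inverted guard. The behavioral difference is easiest to see side by side; a small sketch comparing the 1.12.10 and 1.12.12 predicates (the `BackfillPolicyType` import path is the internal one referenced by the CLI code above):

```python
from dagster import BackfillPolicy
from dagster._core.definitions.backfill_policy import BackfillPolicyType


def rejected_in_1_12_10(policy) -> bool:
    # old guard: only an explicit non-single-run policy failed
    return policy is not None and policy.policy_type != BackfillPolicyType.SINGLE_RUN


def rejected_in_1_12_12(policy) -> bool:
    # new guard: a missing policy now fails as well
    return policy is None or policy.policy_type != BackfillPolicyType.SINGLE_RUN


assert not rejected_in_1_12_10(None) and rejected_in_1_12_12(None)  # the behavior change
assert not rejected_in_1_12_12(BackfillPolicy.single_run())
```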
@@ -132,6 +132,10 @@ class SerializableEntitySubset(Generic[T_EntityKey]):
     def is_compatible_with_partitions_def(
         self, partitions_def: Optional[PartitionsDefinition]
     ) -> bool:
+        from dagster._core.definitions.partitions.definition.time_window import (
+            TimeWindowPartitionsDefinition,
+        )
+
         if self.is_partitioned:
             # for some PartitionSubset types, we have access to the underlying partitions
             # definitions, so we can ensure those are identical
@@ -150,6 +154,11 @@ class SerializableEntitySubset(Generic[T_EntityKey]):
                     and partitions_def.has_partition_key(r.end)
                     for r in self.value.key_ranges
                 )
+            elif isinstance(self.value, DefaultPartitionsSubset) and isinstance(
+                partitions_def, TimeWindowPartitionsDefinition
+            ):
+                return all(partitions_def.has_partition_key(k) for k in self.value.subset)
+
             else:
                 return partitions_def is not None
         else:
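
Together with the deferred `TimeWindowPartitionsDefinition` import above, this hunk adds a compatibility path for a `DefaultPartitionsSubset` stored against a time-window definition: the subset is considered compatible when every stored key is a valid partition key. A sketch of that membership test against a real time-window definition, with an illustrative key set:

```python
from dagster import DailyPartitionsDefinition

partitions_def = DailyPartitionsDefinition(start_date="2025-01-01")
keys = {"2025-01-02", "2025-01-03"}  # stands in for DefaultPartitionsSubset.subset

# equivalent to the all(...) in the new elif branch
print(all(partitions_def.has_partition_key(k) for k in keys))  # True for valid daily keys
```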
@@ -2,6 +2,7 @@ import importlib
 import inspect
 import os
 import sys
+import uuid
 from abc import ABC, abstractmethod
 from collections.abc import Callable, Sequence
 from pathlib import Path
@@ -59,7 +60,11 @@ def rebase_file(relative_path_in_file: str, file_path_resides_in: str) -> str:
     )


-def load_python_file(python_file: Union[str, Path], working_directory: Optional[str]) -> ModuleType:
+def load_python_file(
+    python_file: Union[str, Path],
+    working_directory: Optional[str],
+    add_uuid_suffix: bool = False,
+) -> ModuleType:
     """Takes a path to a python file and returns a loaded module."""
     check.inst_param(python_file, "python_file", (str, Path))
     check.opt_str_param(working_directory, "working_directory")
@@ -68,6 +73,8 @@ def load_python_file(python_file: Union[str, Path], working_directory: Optional[
     os.stat(python_file)

     module_name = os.path.splitext(os.path.basename(python_file))[0]
+    if add_uuid_suffix:
+        module_name = f"{module_name}_{uuid.uuid4().hex}"

     # Use the passed in working directory for local imports (sys.path[0] isn't
     # consistently set in the different entry points that Dagster uses to import code)
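
With the optional uuid suffix, `load_python_file` can load the same file more than once without the second load colliding with (or being served) a previously registered module, because each call gets a unique `sys.modules` key. A self-contained sketch of the same idea using only the standard library (the helper name is hypothetical):

```python
import importlib.util
import sys
import uuid
from types import ModuleType


def load_isolated(path: str) -> ModuleType:
    # a unique module name per load avoids collisions in sys.modules
    name = f"loaded_{uuid.uuid4().hex}"
    spec = importlib.util.spec_from_file_location(name, path)
    assert spec is not None and spec.loader is not None
    module = importlib.util.module_from_spec(spec)
    sys.modules[name] = module
    spec.loader.exec_module(module)
    return module
```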
@@ -520,7 +520,7 @@ class RemoteAssetGraph(BaseAssetGraph[TRemoteAssetNode], ABC, Generic[TRemoteAss

         by_table_name = defaultdict(set)
         for node in self.asset_nodes:
-            normalized_table_name = TableMetadataSet.extract(node.metadata).normalized_table_name
+            normalized_table_name = TableMetadataSet.extract_normalized_table_name(node.metadata)
             if normalized_table_name:
                 by_table_name[normalized_table_name.lower()].add(node.key)

@@ -547,7 +547,7 @@ class RemoteAssetGraph(BaseAssetGraph[TRemoteAssetNode], ABC, Generic[TRemoteAss
         from dagster._core.definitions.metadata.metadata_set import TableMetadataSet

         input_node = self.get(asset_key)
-        input_table_name = TableMetadataSet.extract(input_node.metadata).normalized_table_name
+        input_table_name = TableMetadataSet.extract_normalized_table_name(input_node.metadata)

         if not input_table_name:
             return set()
@@ -1,3 +1,4 @@
+import itertools
 from collections.abc import Iterator, Mapping, Sequence
 from dataclasses import dataclass
 from typing import (  # noqa: UP035
@@ -312,3 +313,38 @@ class AutomationConditionEvaluationState:
     @property
     def true_subset(self) -> SerializableEntitySubset:
         return self.previous_evaluation.true_subset
+
+
+def get_expanded_label(
+    item: Union[AutomationConditionEvaluation, AutomationConditionSnapshot],
+    use_label=False,
+) -> Sequence[str]:
+    if isinstance(item, AutomationConditionSnapshot):
+        label, name, description, children = (
+            item.node_snapshot.label,
+            item.node_snapshot.name,
+            item.node_snapshot.description,
+            item.children,
+        )
+    else:
+        snapshot = item.condition_snapshot
+        label, name, description, children = (
+            snapshot.label,
+            snapshot.name,
+            snapshot.description,
+            item.child_evaluations,
+        )
+
+    if use_label and label is not None:
+        return [label]
+    node_text = name or description
+    child_labels = [f"({' '.join(get_expanded_label(c, use_label=True))})" for c in children]
+    if len(child_labels) == 0:
+        return [node_text]
+    elif len(child_labels) == 1:
+        return [node_text, f"{child_labels[0]}"]
+    else:
+        # intersperses node_text (e.g. AND) between each child label
+        return list(itertools.chain(*itertools.zip_longest(child_labels, [], fillvalue=node_text)))[
+            :-1
+        ]
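
The new `get_expanded_label` helper flattens a condition tree into tokens, interspersing the node text (e.g. `AND`) between child labels via `zip_longest` against an empty iterable. The trick in the final branch is worth seeing in isolation; labels here are illustrative:

```python
import itertools

child_labels = ["(A)", "(B)", "(C)"]
node_text = "AND"

# zip_longest pairs each label with the fillvalue, chain flattens, and [:-1]
# drops the trailing fill.
tokens = list(itertools.chain(*itertools.zip_longest(child_labels, [], fillvalue=node_text)))[:-1]
print(tokens)  # ['(A)', 'AND', '(B)', 'AND', '(C)']
```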
@@ -82,11 +82,11 @@ def asset(
     io_manager_def: Optional[object] = ...,
     io_manager_key: Optional[str] = ...,
     dagster_type: Optional[DagsterType] = ...,
-    partitions_def: Optional[PartitionsDefinition] = ...,
+    partitions_def: Optional[PartitionsDefinition[str]] = ...,
     op_tags: Optional[Mapping[str, Any]] = ...,
     group_name: Optional[str] = ...,
     output_required: bool = ...,
-    automation_condition: Optional[AutomationCondition] = ...,
+    automation_condition: Optional[AutomationCondition[AssetKey]] = ...,
     backfill_policy: Optional[BackfillPolicy] = ...,
     retry_policy: Optional[RetryPolicy] = ...,
     code_version: Optional[str] = ...,
@@ -95,14 +95,14 @@ def asset(
     owners: Optional[Sequence[str]] = ...,
     kinds: Optional[AbstractSet[str]] = ...,
     pool: Optional[str] = ...,
-    **kwargs,
+    **kwargs: Any,
 ) -> Callable[[Callable[..., Any]], AssetsDefinition]: ...


 @overload
 def asset(
     compute_fn: Callable[..., Any],
-    **kwargs,
+    **kwargs: Any,
 ) -> AssetsDefinition: ...

@@ -168,11 +168,11 @@ def asset(
     io_manager_def: Optional[object] = None,
     io_manager_key: Optional[str] = None,
     dagster_type: Optional[DagsterType] = None,
-    partitions_def: Optional[PartitionsDefinition] = None,
+    partitions_def: Optional[PartitionsDefinition[str]] = None,
     op_tags: Optional[Mapping[str, Any]] = None,
     group_name: Optional[str] = None,
     output_required: bool = True,
-    automation_condition: Optional[AutomationCondition] = None,
+    automation_condition: Optional[AutomationCondition[AssetKey]] = None,
     freshness_policy: Optional[FreshnessPolicy] = None,
     backfill_policy: Optional[BackfillPolicy] = None,
     retry_policy: Optional[RetryPolicy] = None,
@@ -182,7 +182,7 @@ def asset(
     owners: Optional[Sequence[str]] = None,
     kinds: Optional[AbstractSet[str]] = None,
     pool: Optional[str] = None,
-    **kwargs,
+    **kwargs: Any,
 ) -> Union[AssetsDefinition, Callable[[Callable[..., Any]], AssetsDefinition]]:
     """Create a definition for how to compute an asset.

@@ -590,7 +590,7 @@ def multi_asset(
     config_schema: Optional[UserConfigSchema] = None,
     required_resource_keys: Optional[AbstractSet[str]] = None,
     internal_asset_deps: Optional[Mapping[str, set[AssetKey]]] = None,
-    partitions_def: Optional[PartitionsDefinition] = None,
+    partitions_def: Optional[PartitionsDefinition[str]] = None,
     hooks: Optional[AbstractSet[HookDefinition]] = None,
     backfill_policy: Optional[BackfillPolicy] = None,
     op_tags: Optional[Mapping[str, Any]] = None,
@@ -769,7 +769,7 @@ def graph_asset(
     config: Optional[Union[ConfigMapping, Mapping[str, Any]]] = None,
     key_prefix: Optional[CoercibleToAssetKeyPrefix] = None,
     group_name: Optional[str] = None,
-    partitions_def: Optional[PartitionsDefinition] = None,
+    partitions_def: Optional[PartitionsDefinition[str]] = None,
     hooks: Optional[AbstractSet[HookDefinition]] = None,
     metadata: Optional[RawMetadataMapping] = ...,
     tags: Optional[Mapping[str, str]] = ...,
@@ -777,7 +777,7 @@ def graph_asset(
     kinds: Optional[AbstractSet[str]] = None,
     legacy_freshness_policy: Optional[LegacyFreshnessPolicy] = ...,
     auto_materialize_policy: Optional[AutoMaterializePolicy] = ...,
-    automation_condition: Optional[AutomationCondition] = ...,
+    automation_condition: Optional[AutomationCondition[AssetKey]] = ...,
     backfill_policy: Optional[BackfillPolicy] = ...,
     resource_defs: Optional[Mapping[str, ResourceDefinition]] = ...,
     check_specs: Optional[Sequence[AssetCheckSpec]] = None,
@@ -806,19 +806,19 @@ def graph_asset(
     config: Optional[Union[ConfigMapping, Mapping[str, Any]]] = None,
     key_prefix: Optional[CoercibleToAssetKeyPrefix] = None,
     group_name: Optional[str] = None,
-    partitions_def: Optional[PartitionsDefinition] = None,
+    partitions_def: Optional[PartitionsDefinition[str]] = None,
     hooks: Optional[AbstractSet[HookDefinition]] = None,
     metadata: Optional[RawMetadataMapping] = None,
     tags: Optional[Mapping[str, str]] = None,
     owners: Optional[Sequence[str]] = None,
-    automation_condition: Optional[AutomationCondition] = None,
+    automation_condition: Optional[AutomationCondition[AssetKey]] = None,
     backfill_policy: Optional[BackfillPolicy] = None,
     resource_defs: Optional[Mapping[str, ResourceDefinition]] = None,
     check_specs: Optional[Sequence[AssetCheckSpec]] = None,
     code_version: Optional[str] = None,
     key: Optional[CoercibleToAssetKey] = None,
     kinds: Optional[AbstractSet[str]] = None,
-    **kwargs,
+    **kwargs: Any,
 ) -> Union[AssetsDefinition, Callable[[Callable[..., Any]], AssetsDefinition]]:
     """Creates a software-defined asset that's computed using a graph of ops.

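
The decorator hunks above are typing-only: `PartitionsDefinition` and `AutomationCondition` become parameterized generics and the `**kwargs` escape hatches gain `Any`. Runtime behavior is unchanged; the payoff is that type checkers can now validate call sites such as this sketch (asset body illustrative):

```python
import dagster as dg


@dg.asset(
    partitions_def=dg.DailyPartitionsDefinition(start_date="2025-01-01"),  # PartitionsDefinition[str]
    automation_condition=dg.AutomationCondition.eager(),  # AutomationCondition[AssetKey]
)
def typed_asset() -> None: ...
```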
@@ -1,14 +1,16 @@
 from collections.abc import Callable, Mapping, Sequence
 from inspect import Parameter, Signature, isgeneratorfunction, signature
-from typing import Any, NamedTuple, Optional
+from typing import Any, Optional

+from dagster_shared.record import record
 from docstring_parser import parse

 from dagster._core.decorator_utils import get_type_hints
 from dagster._core.definitions.utils import NoValueSentinel


-class InferredInputProps(NamedTuple):
+@record
+class InferredInputProps:
     """The information about an input that can be inferred from the function signature."""

     name: str
@@ -17,7 +19,8 @@ class InferredInputProps(NamedTuple):
     default_value: Any = NoValueSentinel


-class InferredOutputProps(NamedTuple):
+@record
+class InferredOutputProps:
     """The information about an input that can be inferred from the function signature."""

     annotation: Any
@@ -80,15 +83,15 @@ def _infer_inputs_from_params(
     for param in params:
         if param.default is not Parameter.empty:
             input_def = InferredInputProps(
-                param.name,
-                type_hints.get(param.name, param.annotation),
+                name=param.name,
+                annotation=type_hints.get(param.name, param.annotation),
                 default_value=param.default,
                 description=_descriptions.get(param.name),
             )
         else:
             input_def = InferredInputProps(
-                param.name,
-                type_hints.get(param.name, param.annotation),
+                name=param.name,
+                annotation=type_hints.get(param.name, param.annotation),
                 description=_descriptions.get(param.name),
             )

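
`InferredInputProps` and `InferredOutputProps` move from `NamedTuple` to `dagster_shared`'s `@record`, whose generated constructor is keyword-only by default; that is why the two call sites in `_infer_inputs_from_params` switch from positional to named arguments. A sketch of the constraint, using a throwaway class rather than the real ones:

```python
from dagster_shared.record import record


@record
class Example:
    name: str
    value: int = 0


Example(name="a")  # fields are keyword-only by default
# Example("a")     # rejected: positional construction is not supported
```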
@@ -1,12 +1,13 @@
 import inspect
 from collections.abc import Callable, Mapping
-from types import FunctionType, UnionType
-from typing import TYPE_CHECKING, Any, NamedTuple, Optional, TypeVar, Union
+from types import UnionType
+from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union

 from dagster_shared.error import DagsterError
+from dagster_shared.record import IHaveNew, record, record_custom

 import dagster._check as check
-from dagster._annotations import PublicAttr, deprecated_param, public, superseded
+from dagster._annotations import PublicAttr, public, superseded
 from dagster._core.definitions.events import AssetKey
 from dagster._core.definitions.inference import InferredInputProps
 from dagster._core.definitions.metadata import (
@@ -309,39 +310,22 @@ def _checked_inferred_type(inferred: InferredInputProps) -> DagsterType:
     return resolved_type


-class InputPointer(NamedTuple("_InputPointer", [("node_name", str), ("input_name", str)])):
-    def __new__(cls, node_name: str, input_name: str):
-        return super().__new__(
-            cls,
-            check.str_param(node_name, "node_name"),
-            check.str_param(input_name, "input_name"),
-        )
+@record(kw_only=False)
+class InputPointer:
+    node_name: str
+    input_name: str


-class FanInInputPointer(
-    NamedTuple(
-        "_FanInInputPointer", [("node_name", str), ("input_name", str), ("fan_in_index", int)]
-    )
-):
-    def __new__(cls, node_name: str, input_name: str, fan_in_index: int):
-        return super().__new__(
-            cls,
-            check.str_param(node_name, "node_name"),
-            check.str_param(input_name, "input_name"),
-            check.int_param(fan_in_index, "fan_in_index"),
-        )
+@record(kw_only=False)
+class FanInInputPointer:
+    node_name: str
+    input_name: str
+    fan_in_index: int


-@deprecated_param(
-    param="dagster_type",
-    breaking_version="2.0",
-    additional_warn_text="Any defined `dagster_type` should come from the upstream op `Output`.",
-    # Disabling warning here since we're passing this internally and I'm not sure whether it is
-    # actually used or discarded.
-    emit_runtime_warning=False,
-)
 @public
-class InputMapping(NamedTuple):
+@record
+class InputMapping:
     """Defines an input mapping for a graph.

     Args:
@@ -350,8 +334,8 @@ class InputMapping(NamedTuple):
         mapped_node_input_name (str): Name of the input in the node (op/graph) that is being mapped to.
         fan_in_index (Optional[int]): The index in to a fanned input, otherwise None.
         graph_input_description (Optional[str]): A description of the input in the graph being mapped from.
-        dagster_type (Optional[DagsterType]): The dagster type of the graph's input
-            being mapped from.
+        dagster_type (Optional[DagsterType]): DEPRECATED, to be removed in 2.0.
+            The dagster type of the graph's input being mapped from.

     Examples:
         .. code-block:: python
@@ -384,6 +368,8 @@ class InputMapping(NamedTuple):
     mapped_node_input_name: str
     fan_in_index: Optional[int] = None
     graph_input_description: Optional[str] = None
+
+    # DEPRECATED: Any defined `dagster_type` should come from the upstream op `Output`.
     dagster_type: Optional[DagsterType] = None

     @property
@@ -411,26 +397,8 @@ class InputMapping(NamedTuple):


 @public
-class In(
-    NamedTuple(
-        "_In",
-        [
-            ("dagster_type", PublicAttr[Union[DagsterType, type[NoValueSentinel]]]),
-            ("description", PublicAttr[Optional[str]]),
-            ("default_value", PublicAttr[Any]),
-            ("metadata", PublicAttr[Optional[Mapping[str, Any]]]),
-            (
-                "asset_key",
-                PublicAttr[Optional[Union[AssetKey, Callable[["InputContext"], AssetKey]]]],
-            ),
-            (
-                "asset_partitions",
-                PublicAttr[Optional[Union[set[str], Callable[["InputContext"], set[str]]]]],
-            ),
-            ("input_manager_key", PublicAttr[Optional[str]]),
-        ],
-    )
-):
+@record_custom
+class In(IHaveNew):
     """Defines an argument to an op's compute function.

     Inputs may flow from previous op's outputs, or be stubbed using config. They may optionally
@@ -454,6 +422,14 @@ class In(
         upstream output.
     """

+    dagster_type: PublicAttr[Union[DagsterType, type[NoValueSentinel]]]
+    description: PublicAttr[Optional[str]]
+    default_value: PublicAttr[Any]
+    metadata: PublicAttr[Mapping[str, Any]]
+    asset_key: PublicAttr[Optional[Union[AssetKey, Callable[["InputContext"], AssetKey]]]]
+    asset_partitions: PublicAttr[Optional[Union[set[str], Callable[["InputContext"], set[str]]]]]
+    input_manager_key: PublicAttr[Optional[str]]
+
     def __new__(
         cls,
         dagster_type: Union[type, UnionType, DagsterType] = NoValueSentinel,
@@ -471,12 +447,12 @@ class In(
                 if dagster_type is NoValueSentinel
                 else resolve_dagster_type(dagster_type)
             ),
-            description=check.opt_str_param(description, "description"),
+            description=description,
             default_value=default_value,
-            metadata=check.opt_mapping_param(metadata, "metadata", key_type=str),
-            asset_key=check.opt_inst_param(asset_key, "asset_key", (AssetKey, FunctionType)),
+            metadata={} if metadata is None else metadata,
+            asset_key=asset_key,
             asset_partitions=asset_partitions,
-            input_manager_key=check.opt_str_param(input_manager_key, "input_manager_key"),
+            input_manager_key=input_manager_key,
         )

     @staticmethod
@@ -506,15 +482,15 @@ class In(


 @public
-class GraphIn(NamedTuple("_GraphIn", [("description", PublicAttr[Optional[str]])])):
+@record
+class GraphIn:
     """Represents information about an input that a graph maps.

     Args:
         description (Optional[str]): Human-readable description of the input.
     """

-    def __new__(cls, description: Optional[str] = None):
-        return super().__new__(cls, description=description)
+    description: PublicAttr[Optional[str]] = None

     def to_definition(self, name: str) -> InputDefinition:
         return InputDefinition(name=name, description=self.description)
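
The `input.py` hunks above replace the `NamedTuple` subclasses (`InputPointer`, `FanInInputPointer`, `InputMapping`, `In`, `GraphIn`) with `@record` variants: validation previously done by explicit `check.*_param` calls now comes from the record machinery's typed fields, the `dagster_type` deprecation marker on `InputMapping` becomes a comment, and `In.metadata` normalizes `None` to `{}` at construction. The public API is unchanged; typical usage still looks like this sketch (op and input names illustrative):

```python
import dagster as dg


@dg.op(ins={"raw": dg.In(dagster_type=str, description="raw payload")})
def parse(raw: str) -> int:
    return len(raw)
```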
@@ -197,9 +197,20 @@ class TableMetadataSet(NamespacedMetadataSet):
     def current_key_by_legacy_key(cls) -> Mapping[str, str]:
         return {"relation_identifier": "table_name"}

-    @property
-    def normalized_table_name(self) -> Optional[str]:
-        return self.table_name.lower() if self.table_name else None
+    @classmethod
+    def extract_normalized_table_name(cls, metadata: Mapping[str, Any]) -> Optional[str]:
+        from pydantic import ValidationError
+
+        metadata_subset = {
+            key: metadata[key]
+            for key in {"dagster/table_name", "dagster/relation_identifier"}
+            if key in metadata
+        }
+        try:
+            table_name = TableMetadataSet.extract(metadata_subset).table_name
+            return table_name.lower() if table_name else None
+        except ValidationError:
+            return None


 class UriMetadataSet(NamespacedMetadataSet):
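
`normalized_table_name` changes from an instance property into the `extract_normalized_table_name` classmethod used by `RemoteAssetGraph` earlier in this diff: it extracts only the two table-name keys and swallows `ValidationError`, so malformed metadata yields `None` instead of raising. Rough behavior, with illustrative values:

```python
from dagster._core.definitions.metadata.metadata_set import TableMetadataSet

meta = {"dagster/table_name": "Analytics.PUBLIC.Orders"}
print(TableMetadataSet.extract_normalized_table_name(meta))  # "analytics.public.orders"

# a value that fails pydantic validation now returns None rather than raising
print(TableMetadataSet.extract_normalized_table_name({"dagster/table_name": 123}))  # None
```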
@@ -21,6 +21,7 @@ def _kill_on_cancel(instance_ref: InstanceRef, run_id, shutdown_event):
     if run.status in [
         DagsterRunStatus.CANCELING,
         DagsterRunStatus.CANCELED,
+        DagsterRunStatus.FAILURE,
     ]:
         print(  # noqa: T201
             f"Detected run status {run.status}, sending interrupt to main thread"
@@ -25,6 +25,7 @@ from dagster_shared.telemetry import (
     TelemetrySettings,
     dagster_home_if_set,
     get_or_set_instance_id,
+    get_or_set_user_id,
     log_telemetry_action,
     write_telemetry_log_line,
 )
@@ -379,6 +380,7 @@ def log_remote_repo_stats(
         client_time=str(datetime.datetime.now()),
         event_id=str(uuid.uuid4()),
         instance_id=instance_id,
+        user_id=get_or_set_user_id(),
         metadata={
             **get_stats_from_remote_repo(remote_repo),
             "source": source,
@@ -450,6 +452,7 @@ def log_repo_stats(
         client_time=str(datetime.datetime.now()),
         event_id=str(uuid.uuid4()),
         instance_id=instance_id,
+        user_id=get_or_set_user_id(),
         metadata={
             "source": source,
             "pipeline_name_hash": job_name_hash,
@@ -43,6 +43,7 @@ AVAILABLE_EXAMPLES = [
     "project_analytics",
     "project_fully_featured",
     "quickstart_etl",
+    "snowflake_cortex",
     "tutorial_notebook_assets",
     "with_great_expectations",
     "with_openai",