dagster-cloud 1.10.11__py3-none-any.whl → 1.12.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. dagster_cloud/__init__.py +3 -3
  2. dagster_cloud/agent/dagster_cloud_agent.py +64 -20
  3. dagster_cloud/agent/instrumentation/run_launch.py +2 -2
  4. dagster_cloud/agent/instrumentation/schedule.py +1 -1
  5. dagster_cloud/agent/instrumentation/sensor.py +1 -1
  6. dagster_cloud/anomaly_detection/__init__.py +2 -2
  7. dagster_cloud/anomaly_detection/defs.py +11 -8
  8. dagster_cloud/api/dagster_cloud_api.py +7 -5
  9. dagster_cloud/auth/constants.py +21 -5
  10. dagster_cloud/batching/__init__.py +1 -1
  11. dagster_cloud/dagster_insights/__init__.py +12 -6
  12. dagster_cloud/dagster_insights/bigquery/dbt_wrapper.py +8 -2
  13. dagster_cloud/dagster_insights/bigquery/insights_bigquery_resource.py +4 -2
  14. dagster_cloud/dagster_insights/insights_utils.py +1 -1
  15. dagster_cloud/dagster_insights/metrics_utils.py +1 -1
  16. dagster_cloud/dagster_insights/snowflake/dagster_snowflake_insights.py +1 -9
  17. dagster_cloud/dagster_insights/snowflake/dbt_wrapper.py +9 -2
  18. dagster_cloud/dagster_insights/snowflake/definitions.py +5 -4
  19. dagster_cloud/dagster_insights/snowflake/insights_snowflake_resource.py +1 -2
  20. dagster_cloud/definitions/__init__.py +0 -0
  21. dagster_cloud/definitions/job_selection.py +36 -0
  22. dagster_cloud/execution/utils/process.py +1 -1
  23. dagster_cloud/instance/__init__.py +81 -42
  24. dagster_cloud/metadata/source_code.py +3 -1
  25. dagster_cloud/opentelemetry/config/exporter.py +1 -1
  26. dagster_cloud/opentelemetry/controller.py +1 -1
  27. dagster_cloud/opentelemetry/observers/dagster_exception_handler.py +1 -1
  28. dagster_cloud/opentelemetry/observers/execution_observer.py +4 -2
  29. dagster_cloud/pex/grpc/__init__.py +2 -2
  30. dagster_cloud/pex/grpc/client.py +2 -2
  31. dagster_cloud/pex/grpc/server/__init__.py +2 -2
  32. dagster_cloud/pex/grpc/server/cli/__init__.py +2 -2
  33. dagster_cloud/pex/grpc/server/manager.py +5 -4
  34. dagster_cloud/pex/grpc/server/registry.py +15 -8
  35. dagster_cloud/pex/grpc/server/server.py +17 -8
  36. dagster_cloud/secrets/__init__.py +1 -1
  37. dagster_cloud/serverless/__init__.py +1 -1
  38. dagster_cloud/serverless/io_manager.py +4 -1
  39. dagster_cloud/storage/compute_logs/__init__.py +3 -1
  40. dagster_cloud/storage/compute_logs/compute_log_manager.py +17 -13
  41. dagster_cloud/storage/defs_state/__init__.py +3 -0
  42. dagster_cloud/storage/defs_state/queries.py +15 -0
  43. dagster_cloud/storage/defs_state/storage.py +113 -0
  44. dagster_cloud/storage/event_logs/__init__.py +3 -1
  45. dagster_cloud/storage/event_logs/storage.py +9 -2
  46. dagster_cloud/storage/event_logs/utils.py +1 -3
  47. dagster_cloud/storage/runs/__init__.py +1 -1
  48. dagster_cloud/storage/runs/queries.py +15 -0
  49. dagster_cloud/storage/runs/storage.py +30 -3
  50. dagster_cloud/storage/schedules/__init__.py +1 -1
  51. dagster_cloud/storage/schedules/storage.py +1 -1
  52. dagster_cloud/util/errors.py +0 -91
  53. dagster_cloud/version.py +1 -1
  54. dagster_cloud/workspace/config_schema/__init__.py +43 -5
  55. dagster_cloud/workspace/docker/__init__.py +8 -7
  56. dagster_cloud/workspace/docker/utils.py +1 -1
  57. dagster_cloud/workspace/ecs/__init__.py +1 -1
  58. dagster_cloud/workspace/ecs/client.py +23 -18
  59. dagster_cloud/workspace/ecs/launcher.py +19 -5
  60. dagster_cloud/workspace/ecs/run_launcher.py +1 -2
  61. dagster_cloud/workspace/ecs/utils.py +5 -2
  62. dagster_cloud/workspace/kubernetes/__init__.py +1 -1
  63. dagster_cloud/workspace/kubernetes/launcher.py +11 -12
  64. dagster_cloud/workspace/kubernetes/utils.py +1 -2
  65. dagster_cloud/workspace/user_code_launcher/__init__.py +5 -3
  66. dagster_cloud/workspace/user_code_launcher/process.py +2 -3
  67. dagster_cloud/workspace/user_code_launcher/user_code_launcher.py +71 -34
  68. dagster_cloud/workspace/user_code_launcher/utils.py +7 -0
  69. {dagster_cloud-1.10.11.dist-info → dagster_cloud-1.12.6.dist-info}/METADATA +9 -8
  70. dagster_cloud-1.12.6.dist-info/RECORD +134 -0
  71. {dagster_cloud-1.10.11.dist-info → dagster_cloud-1.12.6.dist-info}/WHEEL +1 -1
  72. dagster_cloud-1.10.11.dist-info/RECORD +0 -129
  73. {dagster_cloud-1.10.11.dist-info → dagster_cloud-1.12.6.dist-info}/top_level.txt +0 -0
dagster_cloud/storage/compute_logs/compute_log_manager.py CHANGED
@@ -1,5 +1,6 @@
+import os
 from collections.abc import Sequence
-from typing import TYPE_CHECKING, Any, Optional
+from typing import IO, TYPE_CHECKING, Any, Optional
 
 import requests
 from dagster import (
@@ -8,14 +9,15 @@ from dagster import (
     StringSource,
     _check as check,
 )
-from dagster._core.storage.cloud_storage_compute_log_manager import CloudStorageComputeLogManager
+from dagster._core.storage.cloud_storage_compute_log_manager import (
+    TruncatingCloudStorageComputeLogManager,
+)
 from dagster._core.storage.compute_log_manager import ComputeIOType
 from dagster._core.storage.local_compute_log_manager import (
     IO_TYPE_EXTENSION,
     LocalComputeLogManager,
 )
 from dagster._serdes import ConfigurableClass, ConfigurableClassData
-from dagster._utils import ensure_file
 from dagster_cloud_cli.core.errors import raise_http_error
 from dagster_cloud_cli.core.headers.auth import DagsterCloudInstanceScope
 from dagster_shared import seven
@@ -27,7 +29,7 @@ if TYPE_CHECKING:
 
 
 class CloudComputeLogManager(
-    CloudStorageComputeLogManager["DagsterCloudAgentInstance"], ConfigurableClass
+    TruncatingCloudStorageComputeLogManager["DagsterCloudAgentInstance"], ConfigurableClass
 ):
     def __init__(
         self,
@@ -47,6 +49,7 @@ class CloudComputeLogManager(
         self._local_manager = LocalComputeLogManager(local_dir)
         self._upload_interval = check.opt_int_param(upload_interval, "upload_interval")
         self._inst_data = check.opt_inst_param(inst_data, "inst_data", ConfigurableClassData)
+        super().__init__()
 
     @property
     def inst_data(self):
@@ -88,17 +91,19 @@ class CloudComputeLogManager(
         """Returns whether the cloud storage contains logs for a given log key."""
         return False
 
-    def upload_to_cloud_storage(
-        self, log_key: Sequence[str], io_type: ComputeIOType, partial=False
+    def _upload_file_obj(
+        self, data: IO[bytes], log_key: Sequence[str], io_type: ComputeIOType, partial=False
     ):
         path = self.local_manager.get_captured_local_path(log_key, IO_TYPE_EXTENSION[io_type])
-        ensure_file(path)
+        size_bytes = os.stat(path).st_size
         params: dict[str, Any] = {
             "log_key": log_key,
             "io_type": io_type.value,
+            "size_bytes": size_bytes,
             # for back-compat
             "run_id": log_key[0],
             "key": log_key[-1],
+            "method": "PUT",
         }
         if partial:
             params["partial"] = True
@@ -115,12 +120,11 @@
         if resp_data.get("skip_upload"):
             return
 
-        with open(path, "rb") as f:
-            self._upload_session.post(
-                resp_data["url"],
-                data=resp_data["fields"],
-                files={"file": f},
-            )
+        self._upload_session.put(
+            resp_data["url"],
+            data=data,
+            timeout=self._instance.dagster_cloud_api_timeout,
+        )
 
     def download_from_cloud_storage(
         self, log_key: Sequence[str], io_type: ComputeIOType, partial=False
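The upload path above changes from a multipart POST (with server-provided form fields) to a streaming PUT against a presigned URL, with the file size reported up front when requesting that URL. A minimal sketch of the new pattern, assuming a presigned URL obtained elsewhere — the helper name and timeout below are illustrative, not part of dagster-cloud's API:

import requests

def put_log_file(presigned_url: str, path: str, timeout: int = 60) -> None:
    with open(path, "rb") as f:
        # Stream the file object straight to the presigned URL; the old code
        # instead POSTed a multipart form using server-provided fields.
        resp = requests.put(presigned_url, data=f, timeout=timeout)
    resp.raise_for_status()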
dagster_cloud/storage/defs_state/__init__.py ADDED
@@ -0,0 +1,3 @@
+from dagster_cloud.storage.defs_state.storage import (
+    GraphQLDefsStateStorage as GraphQLDefsStateStorage,
+)
dagster_cloud/storage/defs_state/queries.py ADDED
@@ -0,0 +1,15 @@
+GET_LATEST_DEFS_STATE_INFO_QUERY = """
+    query getLatestDefsStateInfo {
+        latestDefsStateInfo
+    }
+"""
+
+SET_LATEST_VERSION_MUTATION = """
+    mutation setLatestDefsStateVersion($key: String!, $version: String!) {
+        defsState {
+            setLatestDefsStateVersion(key: $key, version: $version) {
+                ok
+            }
+        }
+    }
+"""
dagster_cloud/storage/defs_state/storage.py ADDED
@@ -0,0 +1,113 @@
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Optional
+
+import dagster._check as check
+from dagster._core.storage.defs_state.base import DefsStateStorage
+from dagster._serdes import ConfigurableClass, ConfigurableClassData
+from dagster_cloud_cli.core.artifacts import download_artifact, upload_artifact
+from dagster_cloud_cli.core.headers.auth import DagsterCloudInstanceScope
+from dagster_shared.serdes import deserialize_value
+from dagster_shared.serdes.objects.models.defs_state_info import DefsStateInfo
+from typing_extensions import Self
+
+if TYPE_CHECKING:
+    from dagster_cloud.instance import DagsterCloudAgentInstance  # noqa: F401
+
+GET_LATEST_DEFS_STATE_INFO_QUERY = """
+    query getLatestDefsStateInfo {
+        latestDefsStateInfo
+    }
+"""
+
+SET_LATEST_VERSION_MUTATION = """
+    mutation setLatestDefsStateVersion($key: String!, $version: String!) {
+        defsState {
+            setLatestDefsStateVersion(key: $key, version: $version) {
+                ok
+            }
+        }
+    }
+"""
+
+
+class GraphQLDefsStateStorage(DefsStateStorage["DagsterCloudAgentInstance"], ConfigurableClass):
+    def __init__(
+        self, inst_data: Optional[ConfigurableClassData] = None, override_graphql_client=None
+    ):
+        """Initialize this class directly only for test (using `override_graphql_client`).
+        Use the ConfigurableClass machinery to init from instance yaml.
+        """
+        self._inst_data = check.opt_inst_param(inst_data, "inst_data", ConfigurableClassData)
+        self._override_graphql_client = override_graphql_client
+
+    @property
+    def inst_data(self):
+        return self._inst_data
+
+    @classmethod
+    def config_type(cls):
+        return {}
+
+    @classmethod
+    def from_config_value(cls, inst_data: ConfigurableClassData, config_value: Any) -> Self:
+        return cls(inst_data=inst_data)
+
+    @property
+    def url(self) -> str:
+        return self._instance.dagster_cloud_url
+
+    @property
+    def api_token(self) -> str:
+        return check.not_none(self._instance.dagster_cloud_agent_token)
+
+    @property
+    def deployment(self) -> str:
+        return check.not_none(self._instance.deployment_name)
+
+    @property
+    def graphql_client(self):
+        return (
+            self._override_graphql_client
+            if self._override_graphql_client
+            else self._instance.graphql_client
+        )
+
+    def _execute_query(self, query, variables=None, idempotent_mutation=False):
+        return self.graphql_client.execute(
+            query, variable_values=variables, idempotent_mutation=idempotent_mutation
+        )
+
+    def _get_artifact_key(self, key: str, version: str) -> str:
+        return f"__state__/{self._sanitize_key(key)}/{version}"
+
+    def download_state_to_path(self, key: str, version: str, path: Path) -> None:
+        download_artifact(
+            url=self.url,
+            scope=DagsterCloudInstanceScope.DEPLOYMENT,
+            api_token=self.api_token,
+            key=self._get_artifact_key(key, version),
+            path=path,
+            deployment=self.deployment,
+        )
+
+    def upload_state_from_path(self, key: str, version: str, path: Path) -> None:
+        upload_artifact(
+            url=self.url,
+            scope=DagsterCloudInstanceScope.DEPLOYMENT,
+            api_token=self.api_token,
+            key=self._get_artifact_key(key, version),
+            path=path,
+            deployment=self.deployment,
+        )
+        self.set_latest_version(key, version)
+
+    def get_latest_defs_state_info(self) -> Optional[DefsStateInfo]:
+        res = self._execute_query(GET_LATEST_DEFS_STATE_INFO_QUERY)
+        result = res["data"]["latestDefsStateInfo"]
+        if result is not None:
+            return deserialize_value(result, DefsStateInfo)
+        else:
+            return None
+
+    def set_latest_version(self, key: str, version: str) -> None:
+        self._execute_query(SET_LATEST_VERSION_MUTATION, variables={"key": key, "version": version})
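For orientation, a hedged usage sketch of the new class: an instance is normally built through the ConfigurableClass machinery from instance yaml, so the storage argument here stands in for that, and the file paths are illustrative.

from pathlib import Path

def roundtrip_defs_state(storage, key: str, version: str) -> None:
    # Uploads the file as artifact "__state__/<sanitized-key>/<version>" and
    # records it as the latest version via the GraphQL mutation.
    storage.upload_state_from_path(key, version, Path("/tmp/defs_state.bin"))
    # Any reader with the same key/version can then pull it back down.
    storage.download_state_to_path(key, version, Path("/tmp/defs_state_copy.bin"))
    # The server-side mapping of keys to latest versions is queryable:
    info = storage.get_latest_defs_state_info()  # None if nothing stored yet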
dagster_cloud/storage/event_logs/__init__.py CHANGED
@@ -1 +1,3 @@
-from .storage import GraphQLEventLogStorage as GraphQLEventLogStorage
+from dagster_cloud.storage.event_logs.storage import (
+    GraphQLEventLogStorage as GraphQLEventLogStorage,
+)
dagster_cloud/storage/event_logs/storage.py CHANGED
@@ -11,6 +11,7 @@ import dagster._check as check
 from dagster import AssetCheckKey, DagsterInvalidInvocationError, PartitionsDefinition
 from dagster._core.assets import AssetDetails
 from dagster._core.definitions.events import AssetKey, ExpectationResult
+from dagster._core.definitions.freshness import FreshnessStateRecord
 from dagster._core.event_api import (
     AssetRecordsFilter,
     EventLogRecord,
@@ -67,7 +68,7 @@ if TYPE_CHECKING:
 
 DEFAULT_RUN_SCOPED_EVENT_TAILER_OFFSET = 20000
 
-from .queries import (
+from dagster_cloud.storage.event_logs.queries import (
     ADD_DYNAMIC_PARTITIONS_MUTATION,
     CHECK_CONCURRENCY_CLAIM_QUERY,
     CLAIM_CONCURRENCY_SLOT_MUTATION,
@@ -624,6 +625,7 @@ class GraphQLEventLogStorage(EventLogStorage, ConfigurableClass):
            if event.dagster_event is not None
            else "user",
        )
+        event_span.set_attribute("run_id", event.run_id)
        yield
 
    def store_event(self, event: EventLogEntry):
@@ -778,6 +780,11 @@ class GraphQLEventLogStorage(EventLogStorage, ConfigurableClass):
            for result in res["data"]["eventLogs"]["getAssetRecords"]
        ]
 
+    def get_freshness_state_records(
+        self, keys: Sequence[AssetKey]
+    ) -> Mapping[AssetKey, FreshnessStateRecord]:
+        raise NotImplementedError("Not callable from user cloud")
+
    def has_asset_key(self, asset_key: AssetKey):
        check.inst_param(asset_key, "asset_key", AssetKey)
 
@@ -950,7 +957,7 @@ class GraphQLEventLogStorage(EventLogStorage, ConfigurableClass):
        ]
 
        # Translate list to tuple
-        return {key: tuple(val) for key, val in result.items()}  # type: ignore
+        return {key: tuple(val) for key, val in result.items()}
 
    def get_event_tags_for_asset(
        self,
dagster_cloud/storage/event_logs/utils.py CHANGED
@@ -11,9 +11,7 @@ from dagster._core.events import (
     StepRetryData,
 )
 from dagster._core.events.log import EventLogEntry
-from dagster._utils.error import SerializableErrorInfo
-
-from dagster_cloud.util.errors import truncate_serialized_error
+from dagster._utils.error import SerializableErrorInfo, truncate_serialized_error
 
 
 def _get_error_character_size_limit() -> int:
dagster_cloud/storage/runs/__init__.py CHANGED
@@ -1 +1 @@
-from .storage import GraphQLRunStorage as GraphQLRunStorage
+from dagster_cloud.storage.runs.storage import GraphQLRunStorage as GraphQLRunStorage
dagster_cloud/storage/runs/queries.py CHANGED
@@ -43,6 +43,21 @@ ADD_RUN_MUTATION = (
     """
 )
 
+ADD_HISTORICAL_RUN_MUTATION = (
+    ERROR_FRAGMENT
+    + """
+    mutation addHistoricalRunMutation($serializedPipelineRun: String!, $runCreationTime: Float!) {
+        runs {
+            addHistoricalRun(serializedPipelineRun: $serializedPipelineRun, runCreationTime: $runCreationTime) {
+                ok
+                error {
+                    ...errorFragment
+                }
+            }
+        }
+    }
+    """
+)
 
 GET_RUNS_QUERY = """
     query getRunsQuery($filters: RunsFilter, $cursor: String, $limit: Int, $bucketBy: RunBucket, $ascending: Boolean) {
dagster_cloud/storage/runs/storage.py CHANGED
@@ -1,5 +1,6 @@
 import json
 from collections.abc import Iterable, Mapping, Sequence
+from datetime import datetime
 from typing import Any, Callable, Optional, Union
 
 import dagster._check as check
@@ -12,7 +13,7 @@ from dagster._core.errors import (
 from dagster._core.events import DagsterEvent
 from dagster._core.execution.backfill import BulkActionsFilter, BulkActionStatus, PartitionBackfill
 from dagster._core.execution.telemetry import RunTelemetryData
-from dagster._core.remote_representation.origin import RemoteJobOrigin
+from dagster._core.remote_origin import RemoteJobOrigin
 from dagster._core.snap import ExecutionPlanSnapshot, JobSnap, create_execution_plan_snapshot_id
 from dagster._core.storage.dagster_run import (
     DagsterRun,
@@ -35,10 +36,11 @@ from dagster._utils.merger import merge_dicts
 from dagster_cloud_cli.core.errors import DagsterCloudAgentServerError
 from typing_extensions import Self
 
-from .queries import (
+from dagster_cloud.storage.runs.queries import (
     ADD_BACKFILL_MUTATION,
     ADD_DAEMON_HEARTBEAT_MUTATION,
     ADD_EXECUTION_PLAN_SNAPSHOT_MUTATION,
+    ADD_HISTORICAL_RUN_MUTATION,
     ADD_PIPELINE_SNAPSHOT_MUTATION,
     ADD_RUN_MUTATION,
     ADD_RUN_TAGS_MUTATION,
@@ -190,6 +192,29 @@ class GraphQLRunStorage(RunStorage, ConfigurableClass):
             query, variable_values=variables, idempotent_mutation=idempotent_mutation
         )
 
+    def add_historical_run(
+        self, dagster_run: DagsterRun, run_creation_time: datetime
+    ) -> DagsterRun:
+        check.inst_param(dagster_run, "dagster_run", DagsterRun)
+        res = self._execute_query(
+            ADD_HISTORICAL_RUN_MUTATION,
+            variables={
+                "serializedPipelineRun": serialize_value(dagster_run),
+                "runCreationTime": run_creation_time.timestamp(),
+            },
+        )
+        result = res["data"]["runs"]["addHistoricalRun"]
+        error = result.get("error")
+        # Special-case some errors to match the RunStorage API
+        if error:
+            if error["className"] == "DagsterRunAlreadyExists":
+                raise DagsterRunAlreadyExists(error["message"])
+            if error["className"] == "DagsterSnapshotDoesNotExist":
+                raise DagsterSnapshotDoesNotExist(error["message"])
+            else:
+                raise DagsterCloudAgentServerError(res)
+        return dagster_run
+
     def add_run(self, dagster_run: DagsterRun):
         check.inst_param(dagster_run, "dagster_run", DagsterRun)
         res = self._execute_query(
@@ -211,7 +236,9 @@ class GraphQLRunStorage(RunStorage, ConfigurableClass):
 
         return dagster_run
 
-    def handle_run_event(self, run_id: str, event: DagsterEvent):
+    def handle_run_event(
+        self, run_id: str, event: DagsterEvent, update_timestamp: Optional[datetime] = None
+    ):
         raise NotImplementedError("Should never be called by an agent client")
 
     @property
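A short sketch of how a caller might use the new add_historical_run method; the run object itself is elided, since DagsterRun construction is outside this diff, and the creation-time value here is illustrative.

from datetime import datetime, timezone

def record_imported_run(storage, dagster_run) -> None:
    # Serializes the run plus its original creation time; the server-side
    # DagsterRunAlreadyExists / DagsterSnapshotDoesNotExist errors are
    # re-raised locally, matching the RunStorage API.
    storage.add_historical_run(
        dagster_run, run_creation_time=datetime.now(timezone.utc)
    )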
dagster_cloud/storage/schedules/__init__.py CHANGED
@@ -1 +1 @@
-from .storage import GraphQLScheduleStorage as GraphQLScheduleStorage
+from dagster_cloud.storage.schedules.storage import GraphQLScheduleStorage as GraphQLScheduleStorage
dagster_cloud/storage/schedules/storage.py CHANGED
@@ -24,7 +24,7 @@ from dagster._serdes import (
 )
 from typing_extensions import Self
 
-from .queries import (
+from dagster_cloud.storage.schedules.queries import (
     ADD_JOB_STATE_MUTATION,
     ALL_STORED_JOB_STATE_QUERY,
     CREATE_JOB_TICK_MUTATION,
dagster_cloud/util/errors.py CHANGED
@@ -1,98 +1,7 @@
 from collections.abc import Sequence
-from typing import Optional
 
-from dagster._serdes import serialize_value
 from dagster._utils.error import SerializableErrorInfo
 
-ERROR_CLASS_NAME_SIZE_LIMIT = 1000
-
-
-def unwrap_user_code_error(error_info: SerializableErrorInfo) -> SerializableErrorInfo:
-    """Extracts the underlying error from the passed error, if it is a DagsterUserCodeLoadError."""
-    if error_info.cls_name == "DagsterUserCodeLoadError":
-        return unwrap_user_code_error(error_info.cause)
-    return error_info
-
-
-def truncate_serialized_error(
-    error_info: SerializableErrorInfo,
-    field_size_limit: int,
-    max_depth: int,
-    truncations: Optional[list[str]] = None,
-):
-    truncations = [] if truncations is None else truncations
-
-    if error_info.cause:
-        if max_depth == 0:
-            truncations.append("cause")
-            new_cause = (
-                error_info.cause
-                if len(serialize_value(error_info.cause)) <= field_size_limit
-                else SerializableErrorInfo(
-                    message="(Cause truncated due to size limitations)",
-                    stack=[],
-                    cls_name=None,
-                )
-            )
-        else:
-            new_cause = truncate_serialized_error(
-                error_info.cause,
-                field_size_limit,
-                max_depth=max_depth - 1,
-                truncations=truncations,
-            )
-        error_info = error_info._replace(cause=new_cause)
-
-    if error_info.context:
-        if max_depth == 0:
-            truncations.append("context")
-            new_context = (
-                error_info.context
-                if len(serialize_value(error_info.context)) <= field_size_limit
-                else SerializableErrorInfo(
-                    message="(Context truncated due to size limitations)",
-                    stack=[],
-                    cls_name=None,
-                )
-            )
-        else:
-            new_context = truncate_serialized_error(
-                error_info.context,
-                field_size_limit,
-                max_depth=max_depth - 1,
-                truncations=truncations,
-            )
-        error_info = error_info._replace(context=new_context)
-
-    stack_size_so_far = 0
-    truncated_stack = []
-    for stack_elem in error_info.stack:
-        stack_size_so_far += len(stack_elem)
-        if stack_size_so_far > field_size_limit:
-            truncations.append("stack")
-            truncated_stack.append("(TRUNCATED)")
-            break
-
-        truncated_stack.append(stack_elem)
-
-    error_info = error_info._replace(stack=truncated_stack)
-
-    msg_len = len(error_info.message)
-    if msg_len > field_size_limit:
-        truncations.append(f"message from {msg_len} to {field_size_limit}")
-        error_info = error_info._replace(
-            message=error_info.message[:field_size_limit] + " (TRUNCATED)"
-        )
-
-    if error_info.cls_name and len(error_info.cls_name) > ERROR_CLASS_NAME_SIZE_LIMIT:
-        truncations.append("cls_name")
-        error_info = error_info._replace(
-            cls_name=error_info.cls_name[:ERROR_CLASS_NAME_SIZE_LIMIT] + " (TRUNCATED)"
-        )
-
-    return error_info
-
-
 DAGSTER_FRAMEWORK_SUBSTRINGS = [
     "/site-packages/dagster/",
     "/python_modules/dagster/dagster",
dagster_cloud/version.py CHANGED
@@ -1 +1 @@
-__version__ = "1.10.11"
+__version__ = "1.12.6"
dagster_cloud/workspace/config_schema/__init__.py CHANGED
@@ -13,12 +13,12 @@ from dagster import (
 )
 from dagster._config import EvaluationError, StringSource, validate_config
 
-from .docker import SHARED_DOCKER_CONFIG
-from .ecs import (
+from dagster_cloud.workspace.config_schema.docker import SHARED_DOCKER_CONFIG
+from dagster_cloud.workspace.config_schema.ecs import (
     ECS_CONTAINER_CONTEXT_CONFIG as ECS_CONTAINER_CONTEXT_CONFIG,
     SHARED_ECS_CONFIG as SHARED_ECS_CONFIG,
 )
-from .kubernetes import SHARED_K8S_CONFIG
+from dagster_cloud.workspace.config_schema.kubernetes import SHARED_K8S_CONFIG
 
 
 def validate_workspace_location(workspace_location) -> Optional[list[str]]:
@@ -62,8 +62,21 @@ def process_workspace_config(workspace_config) -> dict[str, Any]:
     python_file = config.get("python_file")
     package_name = config.get("package_name")
     module_name = config.get("module_name")
+    autoload_defs_module_name = config.get("autoload_defs_module_name")
     check.invariant(
-        len([val for val in [python_file, package_name, module_name] if val]) == 1,
+        len(
+            [
+                val
+                for val in [
+                    python_file,
+                    package_name,
+                    module_name,
+                    autoload_defs_module_name,
+                ]
+                if val
+            ]
+        )
+        == 1,
         "Must supply exactly one of a file name, a package name, or a module name",
     )
@@ -73,7 +86,8 @@ def process_workspace_config(workspace_config) -> dict[str, Any]:
     new_location = {
         k: v
         for k, v in location.items()
-        if k not in ("python_file", "package_name", "module_name")
+        if k
+        not in ("python_file", "package_name", "module_name", "autoload_defs_module_name")
     }
     new_location["code_source"] = {}
     if "python_file" in location:
@@ -82,6 +96,10 @@ def process_workspace_config(workspace_config) -> dict[str, Any]:
         new_location["code_source"]["package_name"] = location["package_name"]
     elif "module_name" in location:
         new_location["code_source"]["module_name"] = location["module_name"]
+    elif "autoload_defs_module_name" in location:
+        new_location["code_source"]["autoload_defs_module_name"] = location[
+            "autoload_defs_module_name"
+        ]
 
     new_location["location_name"] = name
     updated_locations.append(new_location)
@@ -194,6 +212,10 @@ CONFIG_SCHEMA_FIELDS = {
                     config=str,
                     description="Python module containing the target Dagster repository.",
                 ),
+                "autoload_defs_module_name": Field(
+                    config=str,
+                    description="Python module to automatically load Dagster definitions from.",
+                ),
             },
         ),
         description="Python entry point for the code location.",
@@ -254,6 +276,17 @@ CONFIG_SCHEMA_FIELDS = {
        is_required=False,
        description="Locations that specify an agent queue will only have their requests handled by agents configured to read from a matching queue. By default, requests are placed on a default queue that's handled by all agents.",
    ),
+    "defs_state_info": Field(
+        config=Shape(
+            fields={
+                "info_mapping": Map(
+                    str, Noneable(Shape(fields={"version": str, "create_timestamp": float}))
+                )
+            },
+        ),
+        is_required=False,
+        description="Defs state info for the code location.",
+    ),
 }
@@ -282,6 +315,11 @@ LEGACY_CONFIG_SCHEMA_FIELDS = {
        is_required=False,
        description="Python module containing the target Dagster repository.",
    ),
+    "autoload_defs_module_name": Field(
+        config=str,
+        is_required=False,
+        description="Python module to automatically load Dagster definitions from.",
+    ),
 }
 LEGACY_LOCATION_CONFIG_SCHEMA = Shape(fields=LEGACY_CONFIG_SCHEMA_FIELDS)
 LEGACY_NAMED_LOCATIONS_CONFIG_SCHEMA = Map(
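The new autoload_defs_module_name field joins python_file, package_name, and module_name as a fourth mutually exclusive code source. A minimal sketch of what process_workspace_config now emits for such a location, mirroring the elif branch added above (location and module names are illustrative):

location = {"autoload_defs_module_name": "my_pkg.defs"}

new_location = {"code_source": {}, "location_name": "my_location"}
if "autoload_defs_module_name" in location:
    # The field moves under "code_source", like the other three sources.
    new_location["code_source"]["autoload_defs_module_name"] = location[
        "autoload_defs_module_name"
    ]
# new_location == {"code_source": {"autoload_defs_module_name": "my_pkg.defs"},
#                  "location_name": "my_location"}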
dagster_cloud/workspace/docker/__init__.py CHANGED
@@ -28,19 +28,20 @@ from typing_extensions import Self
 from dagster_cloud.api.dagster_cloud_api import UserCodeDeploymentType
 from dagster_cloud.execution.monitoring import CloudContainerResourceLimits
 from dagster_cloud.storage.tags import PEX_METADATA_TAG
-from dagster_cloud.workspace.user_code_launcher.user_code_launcher import UserCodeLauncherEntry
-from dagster_cloud.workspace.user_code_launcher.utils import get_grpc_server_env
-
-from ..config_schema.docker import SHARED_DOCKER_CONFIG
-from ..user_code_launcher import (
+from dagster_cloud.workspace.config_schema.docker import SHARED_DOCKER_CONFIG
+from dagster_cloud.workspace.docker.utils import unique_docker_resource_name
+from dagster_cloud.workspace.user_code_launcher import (
     DEFAULT_SERVER_PROCESS_STARTUP_TIMEOUT,
     SHARED_USER_CODE_LAUNCHER_CONFIG,
     DagsterCloudGrpcServer,
     DagsterCloudUserCodeLauncher,
     ServerEndpoint,
 )
-from ..user_code_launcher.utils import deterministic_label_for_location
-from .utils import unique_docker_resource_name
+from dagster_cloud.workspace.user_code_launcher.user_code_launcher import UserCodeLauncherEntry
+from dagster_cloud.workspace.user_code_launcher.utils import (
+    deterministic_label_for_location,
+    get_grpc_server_env,
+)
 
 GRPC_SERVER_LABEL = "dagster_grpc_server"
 MULTIPEX_SERVER_LABEL = "dagster_multipex_server"
dagster_cloud/workspace/docker/utils.py CHANGED
@@ -1,6 +1,6 @@
 import re
 
-from ..user_code_launcher.utils import unique_resource_name
+from dagster_cloud.workspace.user_code_launcher.utils import unique_resource_name
 
 
 def unique_docker_resource_name(deployment_name, location_name):
dagster_cloud/workspace/ecs/__init__.py CHANGED
@@ -1 +1 @@
-from .launcher import EcsUserCodeLauncher as EcsUserCodeLauncher
+from dagster_cloud.workspace.ecs.launcher import EcsUserCodeLauncher as EcsUserCodeLauncher