dagster-cloud 1.11.13__py3-none-any.whl → 1.11.15__py3-none-any.whl
This diff shows the contents of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- dagster_cloud/agent/dagster_cloud_agent.py +24 -0
- dagster_cloud/dagster_insights/metrics_utils.py +1 -1
- dagster_cloud/instance/__init__.py +3 -3
- dagster_cloud/opentelemetry/controller.py +1 -1
- dagster_cloud/pex/grpc/server/server.py +3 -1
- dagster_cloud/storage/defs_state/storage.py +2 -2
- dagster_cloud/storage/event_logs/storage.py +1 -1
- dagster_cloud/version.py +1 -1
- dagster_cloud/workspace/config_schema/__init__.py +7 -1
- dagster_cloud/workspace/ecs/client.py +12 -12
- dagster_cloud/workspace/user_code_launcher/user_code_launcher.py +28 -11
- {dagster_cloud-1.11.13.dist-info → dagster_cloud-1.11.15.dist-info}/METADATA +8 -8
- {dagster_cloud-1.11.13.dist-info → dagster_cloud-1.11.15.dist-info}/RECORD +15 -15
- {dagster_cloud-1.11.13.dist-info → dagster_cloud-1.11.15.dist-info}/WHEEL +0 -0
- {dagster_cloud-1.11.13.dist-info → dagster_cloud-1.11.15.dist-info}/top_level.txt +0 -0

dagster_cloud/agent/dagster_cloud_agent.py
CHANGED

@@ -163,6 +163,8 @@ class DagsterCloudAgent:
 
         self._last_liveness_check_time = None
 
+        self._warned_about_long_in_progress_reconcile = False
+
     def __enter__(self):
         return self
 
@@ -366,6 +368,8 @@ class DagsterCloudAgent:
            except Exception:
                self._logger.exception("Failed to add heartbeat")
 
+            self._check_for_long_running_reconcile(user_code_launcher)
+
            # Check for any received interrupts
            with raise_interrupts_as(KeyboardInterrupt):
                pass
@@ -408,6 +412,26 @@ class DagsterCloudAgent:
                self._logger.error(f"Failed to write liveness sentinel and disabling it: {e}")
                self._last_liveness_check_time = False
 
+    def _check_for_long_running_reconcile(self, user_code_launcher):
+        """Detect from the main thread if the background reconcile thread is running behind or has gotten stuck."""
+        in_progress_reconcile_start_time = user_code_launcher.in_progress_reconcile_start_time
+
+        reconcile_start_time_warning = int(
+            os.getenv("DAGSTER_CLOUD_AGENT_RECONCILE_START_TIME_WARNING", "3600")
+        )
+
+        if (
+            in_progress_reconcile_start_time is not None
+            and (time.time() - in_progress_reconcile_start_time) >= reconcile_start_time_warning
+        ):
+            if not self._warned_about_long_in_progress_reconcile:
+                self._logger.warning(
+                    f"Agent has been redeploying code servers for more than {reconcile_start_time_warning} seconds. This may indicate the background thread that performs the redeploys is stuck."
+                )
+                self._warned_about_long_in_progress_reconcile = True
+        else:
+            self._warned_about_long_in_progress_reconcile = False
+
     def _check_update_workspace(self, user_code_launcher, upload_all):
         curr_time = get_current_datetime()
 
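
For reference, the new check reads its threshold from the DAGSTER_CLOUD_AGENT_RECONCILE_START_TIME_WARNING environment variable (default 3600 seconds) and warns at most once while a single reconcile pass stays in progress. A minimal standalone sketch of the same watchdog pattern, using a made-up FakeLauncher and logger rather than the real agent classes:

    import logging
    import os
    import time

    logger = logging.getLogger("agent-watchdog")

    class FakeLauncher:
        """Stand-in for the user code launcher: exposes when the current reconcile began (or None)."""
        def __init__(self):
            self.in_progress_reconcile_start_time = time.time() - 4000  # pretend it started 4000s ago

    def check_for_long_running_reconcile(launcher, already_warned):
        """Return the updated 'already warned' flag, warning at most once per stuck reconcile."""
        threshold = int(os.getenv("DAGSTER_CLOUD_AGENT_RECONCILE_START_TIME_WARNING", "3600"))
        start_time = launcher.in_progress_reconcile_start_time
        if start_time is not None and (time.time() - start_time) >= threshold:
            if not already_warned:
                logger.warning("Reconcile has been running for more than %s seconds", threshold)
            return True
        return False

    if __name__ == "__main__":
        logging.basicConfig(level=logging.INFO)
        warned = check_for_long_running_reconcile(FakeLauncher(), already_warned=False)
        print("warned:", warned)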

dagster_cloud/dagster_insights/metrics_utils.py
CHANGED

@@ -28,7 +28,7 @@ def get_url_and_token_from_instance(instance: DagsterInstance) -> tuple[str, str
     if not isinstance(instance, DagsterCloudAgentInstance):
         raise RuntimeError("This asset only functions in a running Dagster Cloud instance")
 
-    return f"{instance.dagit_url}graphql", instance.dagster_cloud_agent_token
+    return f"{instance.dagit_url}graphql", instance.dagster_cloud_agent_token
 
 
 def get_insights_upload_request_params(

dagster_cloud/instance/__init__.py
CHANGED

@@ -202,7 +202,7 @@ class DagsterCloudAgentInstance(DagsterCloudInstance):
         return create_agent_graphql_client(
             self.client_managed_retries_requests_session,
             self.dagster_cloud_graphql_url,
-            self._dagster_cloud_api_config_for_deployment(None),
+            self._dagster_cloud_api_config_for_deployment(None),
             scope=DagsterCloudInstanceScope.ORGANIZATION,
         )
 
@@ -210,13 +210,13 @@ class DagsterCloudAgentInstance(DagsterCloudInstance):
         return create_agent_graphql_client(
             self.client_managed_retries_requests_session,
             self.dagster_cloud_graphql_url,
-            self._dagster_cloud_api_config_for_deployment(deployment_name),
+            self._dagster_cloud_api_config_for_deployment(deployment_name),
             scope=DagsterCloudInstanceScope.DEPLOYMENT,
         )
 
     def headers_for_deployment(self, deployment_name: str):
         return get_agent_headers(
-            self._dagster_cloud_api_config_for_deployment(deployment_name),
+            self._dagster_cloud_api_config_for_deployment(deployment_name),
             DagsterCloudInstanceScope.DEPLOYMENT,
         )
 

dagster_cloud/opentelemetry/controller.py
CHANGED

@@ -251,7 +251,7 @@ class OpenTelemetryController:
         )
         self._logging_handler = logs_factory.build_logging_handler(
             self._logger_provider,  # pyright: ignore[reportArgumentType]
-            logging_config.get("handler", {}),
+            logging_config.get("handler", {}),
         )
         self._logger.addHandler(self._logging_handler)
 

dagster_cloud/pex/grpc/server/server.py
CHANGED

@@ -235,7 +235,9 @@ class DagsterPexProxyApiServer(DagsterApiServicer):
         return self._query("ExternalPipelineSubsetSnapshot", request, context)
 
     def ExternalRepository(self, request, context):
-        return self._query("ExternalRepository", request, context)
+        return self._query(
+            "ExternalRepository", request, context, timeout=DEFAULT_REPOSITORY_GRPC_TIMEOUT
+        )
 
     def ExternalJob(self, request, context):
         return self._query("ExternalJob", request, context)

dagster_cloud/storage/defs_state/storage.py
CHANGED

@@ -69,7 +69,7 @@ class GraphQLDefsStateStorage(DefsStateStorage["DagsterCloudAgentInstance"], Con
         return (
             self._override_graphql_client
             if self._override_graphql_client
-            else self._instance.graphql_client
+            else self._instance.graphql_client
         )
 
     def _execute_query(self, query, variables=None, idempotent_mutation=False):
@@ -78,7 +78,7 @@ class GraphQLDefsStateStorage(DefsStateStorage["DagsterCloudAgentInstance"], Con
         )
 
     def _get_artifact_key(self, key: str, version: str) -> str:
-        return f"__state__/{key}/{version}"
+        return f"__state__/{self._sanitize_key(key)}/{version}"
 
     def download_state_to_path(self, key: str, version: str, path: Path) -> None:
         download_artifact(
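
The new key handling above passes the raw key through _sanitize_key before it is embedded in the slash-delimited artifact path. That helper's body is not part of this diff, so the sketch below is only an illustration of the general idea, with a hypothetical sanitize_key whose percent-encoding behavior is an assumption:

    import urllib.parse

    def sanitize_key(key: str) -> str:
        # Hypothetical sanitizer: percent-encode characters (notably "/") that would
        # otherwise change the shape of the slash-delimited artifact path.
        return urllib.parse.quote(key, safe="")

    def get_artifact_key(key: str, version: str) -> str:
        # Mirrors the f"__state__/{...}/{version}" layout used above.
        return f"__state__/{sanitize_key(key)}/{version}"

    print(get_artifact_key("my/defs key", "v1"))  # __state__/my%2Fdefs%20key/v1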

dagster_cloud/storage/event_logs/storage.py
CHANGED

@@ -957,7 +957,7 @@ class GraphQLEventLogStorage(EventLogStorage, ConfigurableClass):
         ]
 
         # Translate list to tuple
-        return {key: tuple(val) for key, val in result.items()}
+        return {key: tuple(val) for key, val in result.items()}
 
     def get_event_tags_for_asset(
         self,
dagster_cloud/version.py
CHANGED

@@ -1 +1 @@
-__version__ = "1.11.13"
+__version__ = "1.11.15"

dagster_cloud/workspace/config_schema/__init__.py
CHANGED

@@ -277,7 +277,13 @@ CONFIG_SCHEMA_FIELDS = {
         description="Locations that specify an agent queue will only have their requests handled by agents configured to read from a matching queue. By default, requests are placed on a default queue that's handled by all agents.",
     ),
     "defs_state_info": Field(
-        config=
+        config=Shape(
+            fields={
+                "info_mapping": Map(
+                    str, Noneable(Shape(fields={"version": str, "create_timestamp": float}))
+                )
+            },
+        ),
         is_required=False,
         description="Defs state info for the code location.",
     ),
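
For reference, a location config value that should satisfy the new defs_state_info shape above would look roughly like the following sketch (the mapping keys, version string, and timestamp are made-up examples):

    # A value matching Shape({"info_mapping": Map(str, Noneable(Shape({"version": str, "create_timestamp": float})))}):
    defs_state_info = {
        "info_mapping": {
            "my_defs_key": {"version": "abc123", "create_timestamp": 1719859200.0},
            "key_without_state": None,  # Noneable: entries may map to None
        }
    }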

dagster_cloud/workspace/ecs/client.py
CHANGED

@@ -112,7 +112,7 @@ class Client:
             name="serviceLongArnFormat",
             effectiveSettings=True,
         )
-        return settings["settings"][0]["value"] == "enabled"
+        return settings["settings"][0]["value"] == "enabled"
 
     @property
     @cached_method
@@ -160,7 +160,7 @@ class Client:
         ):
             task_definition_arn = (
                 self.ecs.register_task_definition(
-                    **desired_task_definition_config.task_definition_dict()
+                    **desired_task_definition_config.task_definition_dict()
                 )
                 .get("taskDefinition")
                 .get("taskDefinitionArn")
@@ -466,10 +466,10 @@ class Client:
 
         task_arn = (
             self.ecs.run_task(
-                taskDefinition=task_definition_arn,
+                taskDefinition=task_definition_arn,
                 cluster=self.cluster_name,
-                launchType=self.launch_type,
-                networkConfiguration=self.network_configuration,
+                launchType=self.launch_type,
+                networkConfiguration=self.network_configuration,
             )
             .get("tasks", [{}])[0]
             .get("taskArn")
@@ -477,14 +477,14 @@ class Client:
 
         self.ecs.get_waiter("tasks_stopped").wait(
             cluster=self.cluster_name,
-            tasks=[task_arn],
+            tasks=[task_arn],
             WaiterConfig={"Delay": 1, "MaxAttempts": self.timeout},
         )
 
         exit_code = (
             self.ecs.describe_tasks(
                 cluster=self.cluster_name,
-                tasks=[task_arn],
+                tasks=[task_arn],
             )
             .get("tasks", [{}])[0]
             .get("containers", [{}])[0]
@@ -546,7 +546,7 @@ class Client:
             for key, value in tags.items()
         ]
 
-        arn = self.ecs.create_service(**params).get("service").get("serviceArn")
+        arn = self.ecs.create_service(**params).get("service").get("serviceArn")
 
         return Service(client=self, arn=arn)
 
@@ -721,7 +721,7 @@ class Client:
 
         stopped_tasks = sorted(
             stopped_tasks,
-            key=lambda task: task["createdAt"].timestamp(),
+            key=lambda task: task["createdAt"].timestamp(),
             reverse=True,
         )
         return stopped_tasks
@@ -785,14 +785,14 @@ class Client:
         task = self.ecs.describe_tasks(cluster=self.cluster_name, tasks=[task_arn]).get("tasks")[0]
 
         task_definition_arn = task.get("taskDefinitionArn")
-        task_definition = self.ecs.describe_task_definition(taskDefinition=task_definition_arn).get(
+        task_definition = self.ecs.describe_task_definition(taskDefinition=task_definition_arn).get(
             "taskDefinition"
         )
 
         matching_container_definitions = [
             container_definition
             for container_definition in task_definition.get("containerDefinitions", [])
-            if container_definition["name"] == container_name
+            if container_definition["name"] == container_name
         ]
         if not matching_container_definitions:
             raise Exception(f"Could not find container with name {container_name}")
@@ -800,7 +800,7 @@ class Client:
         container_definition = matching_container_definitions[0]
 
         log_stream_prefix = (
-            container_definition.get("logConfiguration").get("options").get("awslogs-stream-prefix")
+            container_definition.get("logConfiguration").get("options").get("awslogs-stream-prefix")
         )
         container_name = container_definition.get("name")
         task_id = task_arn.split("/")[-1]

dagster_cloud/workspace/user_code_launcher/user_code_launcher.py
CHANGED

@@ -401,6 +401,7 @@ class DagsterCloudUserCodeLauncher(
         self._run_worker_statuses_dict: dict[str, list[CloudRunWorkerStatus]] = {}
         self._run_worker_monitoring_lock = threading.Lock()
 
+        self._in_progress_reconcile_start_time = time.time()
         self._reconcile_count = 0
         self._reconcile_grpc_metadata_shutdown_event = threading.Event()
         self._reconcile_grpc_metadata_thread = None
@@ -888,23 +889,32 @@ class DagsterCloudUserCodeLauncher(
                 repository_name,
                 code_pointer,
             ) in list_repositories_response.repository_code_pointer_dict.items():
-                external_repository_chunks = [
-                    chunk
-                    async for chunk in client.gen_streaming_external_repository(
+                if os.getenv("DAGSTER_CLOUD_USE_STREAMING_EXTERNAL_REPOSITORY"):
+                    external_repository_chunks = [
+                        chunk
+                        async for chunk in client.gen_streaming_external_repository(
+                            remote_repository_origin=RemoteRepositoryOrigin(
+                                location_origin,
+                                repository_name,
+                            ),
+                            defer_snapshots=True,
+                        )
+                    ]
+
+                    serialized_repository_data = "".join(
+                        [
+                            chunk["serialized_external_repository_chunk"]
+                            for chunk in external_repository_chunks
+                        ]
+                    )
+                else:
+                    serialized_repository_data = await client.gen_external_repository(
                         remote_repository_origin=RemoteRepositoryOrigin(
                             location_origin,
                             repository_name,
                         ),
                         defer_snapshots=True,
                     )
-                ]
-
-                serialized_repository_data = "".join(
-                    [
-                        chunk["serialized_external_repository_chunk"]
-                        for chunk in external_repository_chunks
-                    ]
-                )
 
                 # Don't deserialize in case there are breaking changes - let the server do it
                 upload_repo_datas.append(
@@ -1480,6 +1490,8 @@ class DagsterCloudUserCodeLauncher(
                 self._logger.exception("Failed to refresh actual entries.")
             self._last_refreshed_actual_entries = now
 
+        self._in_progress_reconcile_start_time = time.time()
+
         self._reconcile(
             desired_entries,
             upload_locations,
@@ -1496,6 +1508,7 @@ class DagsterCloudUserCodeLauncher(
                 f"Started polling for requests from {self._instance.dagster_cloud_url}"
             )
 
+        self._in_progress_reconcile_start_time = None
         self._reconcile_count += 1
 
     def _update_metrics_thread(self, shutdown_event):
@@ -1520,6 +1533,10 @@ class DagsterCloudUserCodeLauncher(
         # thread-safe since reconcile_count is an integer
         return self._reconcile_count > 0
 
+    @property
+    def in_progress_reconcile_start_time(self) -> Optional[float]:
+        return self._in_progress_reconcile_start_time
+
     def _make_check_on_running_server_endpoint(
         self, server_endpoint: ServerEndpoint
     ) -> Callable[[], Union[ListRepositoriesResponse, SerializableErrorInfo]]:
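
Taken together, the launcher changes above record a start timestamp when a reconcile pass begins, clear it once the pass completes, and expose it read-only through this property so the agent's main thread can watch it. A toy sketch of that bookkeeping pattern (ReconcileTimer is a made-up stand-in, not the real launcher):

    import threading
    import time
    from typing import Optional

    class ReconcileTimer:
        """Toy version of the start-time bookkeeping: set on entry, cleared on completion."""
        def __init__(self) -> None:
            self._in_progress_start_time: Optional[float] = time.time()  # treat startup as "in progress"

        @property
        def in_progress_reconcile_start_time(self) -> Optional[float]:
            # Read from another thread; a float/None swap is coarse but good enough for a watchdog.
            return self._in_progress_start_time

        def run_reconcile(self, work) -> None:
            self._in_progress_start_time = time.time()
            work()
            self._in_progress_start_time = None  # cleared once the pass finishes

    timer = ReconcileTimer()
    threading.Thread(target=timer.run_reconcile, args=(lambda: time.sleep(0.1),)).start()
    print(timer.in_progress_reconcile_start_time)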

{dagster_cloud-1.11.13.dist-info → dagster_cloud-1.11.15.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dagster-cloud
-Version: 1.11.13
+Version: 1.11.15
 Author-email: Elementl <support@elementl.com>
 License: Apache-2.0
 Project-URL: Homepage, https://dagster.io/cloud
@@ -28,14 +28,14 @@ Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
 Classifier: Operating System :: OS Independent
 Requires-Python: <3.14,>=3.9
 Description-Content-Type: text/markdown
-Requires-Dist: dagster==1.11.13
-Requires-Dist: dagster-shared==1.11.13
-Requires-Dist: dagster-cloud-cli==1.11.13
+Requires-Dist: dagster==1.11.15
+Requires-Dist: dagster-shared==1.11.15
+Requires-Dist: dagster-cloud-cli==1.11.15
 Requires-Dist: opentelemetry-api<2,>=1.27.0
 Requires-Dist: opentelemetry-sdk<2,>=1.27.0
 Requires-Dist: opentelemetry-exporter-otlp-proto-grpc<2,>=1.27.0
 Requires-Dist: opentelemetry-exporter-otlp-proto-http<2,>=1.27.0
-Requires-Dist: pex<
+Requires-Dist: pex<2.60.0,>=2.1.132
 Requires-Dist: questionary
 Requires-Dist: requests
 Requires-Dist: typer
@@ -65,12 +65,12 @@ Provides-Extra: insights
 Requires-Dist: pyarrow; extra == "insights"
 Provides-Extra: docker
 Requires-Dist: docker; extra == "docker"
-Requires-Dist: dagster-docker==0.27.13; extra == "docker"
+Requires-Dist: dagster-docker==0.27.15; extra == "docker"
 Provides-Extra: kubernetes
 Requires-Dist: kubernetes; extra == "kubernetes"
-Requires-Dist: dagster-k8s==0.27.13; extra == "kubernetes"
+Requires-Dist: dagster-k8s==0.27.15; extra == "kubernetes"
 Provides-Extra: ecs
-Requires-Dist: dagster-aws==0.27.13; extra == "ecs"
+Requires-Dist: dagster-aws==0.27.15; extra == "ecs"
 Requires-Dist: boto3; extra == "ecs"
 Provides-Extra: sandbox
 Requires-Dist: supervisor; extra == "sandbox"

{dagster_cloud-1.11.13.dist-info → dagster_cloud-1.11.15.dist-info}/RECORD
CHANGED

@@ -1,9 +1,9 @@
 dagster_cloud/__init__.py,sha256=zyM9bqyJFxtEClv_5X4VRldrj0UniKgZzEl0pPJJ_Ts,355
 dagster_cloud/constants.py,sha256=CPAqXJ99SWGMviksdIA2A9894FEvHChNk8UcP4TluYM,455
 dagster_cloud/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dagster_cloud/version.py,sha256=
+dagster_cloud/version.py,sha256=PZwJq8ftyzpAjWU6gB9pCD8ArXJnA3I-ezd7LUG_rZk,24
 dagster_cloud/agent/__init__.py,sha256=_erVyIrxuHUiyNerwX8vNZcKZN8NAloTEkPq8vPZ3MI,811
-dagster_cloud/agent/dagster_cloud_agent.py,sha256=
+dagster_cloud/agent/dagster_cloud_agent.py,sha256=bNMPSJBURWYbwAPhTbYq34o5qxPrqVWI2JA9b6vKHiw,58341
 dagster_cloud/agent/queries.py,sha256=iI84GQ1Zxt5ryo6M1ELIaIae-gwUY14QPPMUeiFK97o,1837
 dagster_cloud/agent/cli/__init__.py,sha256=rGbeQJ2Ap95wPYQuk5XbyHAfP9cs-XPUSmuVM_k278k,9084
 dagster_cloud/agent/instrumentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -25,7 +25,7 @@ dagster_cloud/batching/batcher.py,sha256=kt9F-WdMnCJstCJFuHqagG_eHFTWjlfRQdhE-Cc
 dagster_cloud/dagster_insights/__init__.py,sha256=s8PrSxja4JgQtXz3tuzqPH3oqhm5OMo-97qDBpeQ6LA,2079
 dagster_cloud/dagster_insights/errors.py,sha256=_GrycqbxxqHSnxKveff-sJz9L-UHi4iRg-U_rGAEss8,138
 dagster_cloud/dagster_insights/insights_utils.py,sha256=B7VwG2Gx8mpkcbkNJ1YaOypgeCaA3d3Kfc0Eves-OFg,4658
-dagster_cloud/dagster_insights/metrics_utils.py,sha256=
+dagster_cloud/dagster_insights/metrics_utils.py,sha256=woZddMZIj2YRjALFFSpOPa6weWvwtzmjl3rHlXs6fbQ,3498
 dagster_cloud/dagster_insights/query.py,sha256=BB-JgzCZl-lLHPP8irF3p6X6hkLoni2HpBS4iviibqU,1403
 dagster_cloud/dagster_insights/bigquery/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dagster_cloud/dagster_insights/bigquery/bigquery_utils.py,sha256=hcE0yyU_tFBYVMFs3_7VgDAfu6sIYhD0qmEzWVodQ14,824
@@ -46,14 +46,14 @@ dagster_cloud/execution/cloud_run_launcher/process.py,sha256=eYxvzcaJbqYKjHcXMUU
 dagster_cloud/execution/monitoring/__init__.py,sha256=2_o2UjeEPvfSpnOb0zky3ZQs-G9VHABAXzjvfMnAh8A,17782
 dagster_cloud/execution/utils/__init__.py,sha256=EfU-tK5-ScZYLF_zmWqUgIrmLsb9DDHWFN-CSg3BSXY,254
 dagster_cloud/execution/utils/process.py,sha256=Er6FKMdlGBjpOHDEIkukZf4-TiZ3Xi_iqurfxkOozGM,911
-dagster_cloud/instance/__init__.py,sha256=
+dagster_cloud/instance/__init__.py,sha256=ecxGScVMeqQJqegEUOzMA-LRkovHSB5LgkL4zcFyz8c,26135
 dagster_cloud/instrumentation/__init__.py,sha256=OKaT4qkwQNmCFdKveVrgB7JIo5sZD1F1rutQKdozK_4,886
 dagster_cloud/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dagster_cloud/metadata/source_code.py,sha256=yCUfD6V-wR2s4AUrmVpX_Dd7Gln7SYbtpuEpAH71J1I,5952
 dagster_cloud/metrics/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dagster_cloud/metrics/tracer.py,sha256=DtVuEUp6LI2p8LRyMcuNBn4dChs8vGT9swSCtuYHgIw,1518
 dagster_cloud/opentelemetry/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dagster_cloud/opentelemetry/controller.py,sha256=
+dagster_cloud/opentelemetry/controller.py,sha256=48CvdcljFZ0ZEtDlzMtRfF1fOzVYsSMr08w9dw5je08,13030
 dagster_cloud/opentelemetry/enum.py,sha256=ls6J0fde00WP9vYdFNUOQijbTBGsb7kRD0lar6ImtLk,1686
 dagster_cloud/opentelemetry/config/__init__.py,sha256=Aegfue6NedLV64yCAKQtV3uDjJJLjWKeg53F7SQ46zs,2868
 dagster_cloud/opentelemetry/config/exporter.py,sha256=zznpS4PaBJ5RrvJiRZH-DBi0k0tMK7_xsUbrmbpsyxM,3551
@@ -81,7 +81,7 @@ dagster_cloud/pex/grpc/__generated__/multi_pex_api_pb2_grpc.py,sha256=cCwuUnJ1UH
 dagster_cloud/pex/grpc/server/__init__.py,sha256=OKZqM9NdiRBUOUtWRdWG1_kaS13ELJJ5yHTHUc375ns,177
 dagster_cloud/pex/grpc/server/manager.py,sha256=I1SJXoWKHNmgFPXr7YKtk-UkXOqvKjqxMHiNWccHRog,18702
 dagster_cloud/pex/grpc/server/registry.py,sha256=PzkFlzcPA9HEFlkMFNX68lDj7Eh3m_P4cF3gNyabN5s,12899
-dagster_cloud/pex/grpc/server/server.py,sha256=
+dagster_cloud/pex/grpc/server/server.py,sha256=wdpCFDKQbV7C-M9l5PryQjwtGZphdaG7hO1a3GVXqbQ,16441
 dagster_cloud/pex/grpc/server/cli/__init__.py,sha256=Se6sC8sSWlkgO_RdD4KPO-0xNua3IwXIeM9b_u10Hlo,2523
 dagster_cloud/secrets/__init__.py,sha256=usa0GjXl_T1p8oRJfFcAChLohJGPt0YaR2PJbhd4vx4,96
 dagster_cloud/secrets/loader.py,sha256=gr5m3Ih7fsg8DBSKCmtHCUCpgzNxBOera6B3ONLYKWo,1661
@@ -94,10 +94,10 @@ dagster_cloud/storage/compute_logs/__init__.py,sha256=9TWHkWqOmqEQz9HM-brZwYOMtD
 dagster_cloud/storage/compute_logs/compute_log_manager.py,sha256=OBNuGj0PECWmnAfbayFtRixztOcqaK-IGmgOD0AokfY,4857
 dagster_cloud/storage/defs_state/__init__.py,sha256=eGmQiGjhAYl9FWsO1YcGcf8k1EpZi0EgtWPWWNH3VGE,113
 dagster_cloud/storage/defs_state/queries.py,sha256=UICxqqWH1gCpE_YR7TedxanES2gZ_7dR00y6KR8HWQA,365
-dagster_cloud/storage/defs_state/storage.py,sha256=
+dagster_cloud/storage/defs_state/storage.py,sha256=AwuEUuMZQrlmOPZZXVdW73M8KnTdkK4G3ViCfL9iLDM,3979
 dagster_cloud/storage/event_logs/__init__.py,sha256=kZotTbwh4KGH6Oe7PFXj1PMg9ji85hOOX4NcYHK6JJY,111
 dagster_cloud/storage/event_logs/queries.py,sha256=sgkHhHX-0Vr9WukcRjbuGSH8vjC9razSNnwqXBrihX8,19924
-dagster_cloud/storage/event_logs/storage.py,sha256=
+dagster_cloud/storage/event_logs/storage.py,sha256=kh9SlpoS2OCFHR77Zxs6SBOfKKGbMGUZB5YWdWLzVuc,56454
 dagster_cloud/storage/event_logs/utils.py,sha256=M6uvW8L-H7600bjOy5yTTealHVKKXAFBJO4dRZObTdI,3061
 dagster_cloud/storage/runs/__init__.py,sha256=aNua07DabXVkxhOvQol1Vai4Lg88a_BwuxnvKiwV6_4,86
 dagster_cloud/storage/runs/queries.py,sha256=vJFvL5cU8iV-CqwOZUTupKJ42-LJ0FJDR113LjOqWvc,7250
@@ -109,14 +109,14 @@ dagster_cloud/util/__init__.py,sha256=-rLWHQVwWr0FHyz5dDA-Ia2BRDKJvUQDm9CX3D3mw2
 dagster_cloud/util/container_resources.py,sha256=qkFiULrhvqSB4iLlL30bQpO_XjXrm5lNfMx8iFUYiR8,4693
 dagster_cloud/util/errors.py,sha256=EazIylrZJRbPkpG6SIMqTzXY3fSKBS1pck59wXCrNZE,1638
 dagster_cloud/workspace/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-dagster_cloud/workspace/config_schema/__init__.py,sha256=
+dagster_cloud/workspace/config_schema/__init__.py,sha256=tkGPjf7fw_k0bB_FDZJsIdNAZyZs-GLxRyZvlqedUIM,12542
 dagster_cloud/workspace/config_schema/docker.py,sha256=Xs7FDBVpIOMlWcKXrVWX_lSDgf9qpc9UpYcps1IdQDQ,967
 dagster_cloud/workspace/config_schema/ecs.py,sha256=NKB08hsYlowORvBHCrTqmKVbF67q1XDb-40s1phI5DE,6575
 dagster_cloud/workspace/config_schema/kubernetes.py,sha256=JIdZ5hX06hAw1lGT-5AcwZjfoKMnMj3PKjaQ9GDUL-U,5286
 dagster_cloud/workspace/docker/__init__.py,sha256=2lFoeX45PWf6xUaVwj9p8lNEar8XzVvHd2MfjMR7C-s,15160
 dagster_cloud/workspace/docker/utils.py,sha256=VjT2kiCTByZj9HIQIO34Ukqvb-3cqlMQe54wqmyNh9I,374
 dagster_cloud/workspace/ecs/__init__.py,sha256=Gys8s6kBDnfi198uRflXXRshwOKW-MBEACWqR_SCY-E,92
-dagster_cloud/workspace/ecs/client.py,sha256=
+dagster_cloud/workspace/ecs/client.py,sha256=M9JRwv7UsB7C5_-TaIfAYc2tejxp_ialQ6PB-7QzVyA,31157
 dagster_cloud/workspace/ecs/launcher.py,sha256=z9sasod3QFEtvQ_eVw8R1pWHLiY70MviuJAbR67E6zs,31189
 dagster_cloud/workspace/ecs/run_launcher.py,sha256=e8Rgd3dcrHL1O_Q7UcR11JO75sxCcA8YaaOUmpk3WrU,600
 dagster_cloud/workspace/ecs/service.py,sha256=v-puyDEg2BzHA3RJqEhH8V04bUAcrYIlPCH1SThvwWw,4126
@@ -126,9 +126,9 @@ dagster_cloud/workspace/kubernetes/launcher.py,sha256=HSxyHMYTvJWEXa8RutImxluig5
 dagster_cloud/workspace/kubernetes/utils.py,sha256=2PrxCXew-KZmluVyW9jC9z_kuPcchq4b1ONv4E5UjsM,12234
 dagster_cloud/workspace/user_code_launcher/__init__.py,sha256=E-Izs69AHPAXD9pqd3UH46l4uKxM4Lbz7y2G4KeWzEQ,880
 dagster_cloud/workspace/user_code_launcher/process.py,sha256=6TwjlXZzBlzyQEvmGzuVAUgxe_vCye0Q3aYkPPDAshs,14315
-dagster_cloud/workspace/user_code_launcher/user_code_launcher.py,sha256=
+dagster_cloud/workspace/user_code_launcher/user_code_launcher.py,sha256=BWkm4stk35HrFGydB5Cp61v-OI3z1eTZsFUec1hpGSI,103700
 dagster_cloud/workspace/user_code_launcher/utils.py,sha256=t8Epee9MrXtRhWL-b_3avXxgMGrjLScUNWtBUUGpMCg,5285
-dagster_cloud-1.11.
-dagster_cloud-1.11.
-dagster_cloud-1.11.
-dagster_cloud-1.11.
+dagster_cloud-1.11.15.dist-info/METADATA,sha256=qzIG_Ngw2j6wepJ3qBv1XHxotMMslhlaoEw-XNoZPPk,6589
+dagster_cloud-1.11.15.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dagster_cloud-1.11.15.dist-info/top_level.txt,sha256=2hMt-U33jyCgnywNrDB9Ih0EpaVmiO6dFkYcJ7Iwx4I,14
+dagster_cloud-1.11.15.dist-info/RECORD,,
File without changes
|
|
File without changes
|