great-expectations-cloud 20250915.1.dev1__py3-none-any.whl → 20260120.0.dev0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (18)
  1. great_expectations_cloud/agent/actions/agent_action.py +3 -3
  2. great_expectations_cloud/agent/actions/draft_datasource_config_action.py +6 -12
  3. great_expectations_cloud/agent/actions/generate_data_quality_check_expectations_action.py +9 -11
  4. great_expectations_cloud/agent/actions/list_asset_names.py +2 -6
  5. great_expectations_cloud/agent/actions/run_checkpoint.py +64 -3
  6. great_expectations_cloud/agent/actions/run_metric_list_action.py +3 -3
  7. great_expectations_cloud/agent/actions/run_scheduled_checkpoint.py +26 -2
  8. great_expectations_cloud/agent/actions/run_window_checkpoint.py +1 -1
  9. great_expectations_cloud/agent/agent.py +220 -10
  10. great_expectations_cloud/agent/event_handler.py +8 -7
  11. great_expectations_cloud/agent/message_service/asyncio_rabbit_mq_client.py +36 -8
  12. great_expectations_cloud/agent/message_service/subscriber.py +4 -0
  13. great_expectations_cloud/agent/models.py +22 -2
  14. {great_expectations_cloud-20250915.1.dev1.dist-info → great_expectations_cloud-20260120.0.dev0.dist-info}/METADATA +5 -5
  15. {great_expectations_cloud-20250915.1.dev1.dist-info → great_expectations_cloud-20260120.0.dev0.dist-info}/RECORD +18 -18
  16. {great_expectations_cloud-20250915.1.dev1.dist-info → great_expectations_cloud-20260120.0.dev0.dist-info}/WHEEL +1 -1
  17. {great_expectations_cloud-20250915.1.dev1.dist-info → great_expectations_cloud-20260120.0.dev0.dist-info}/entry_points.txt +0 -0
  18. {great_expectations_cloud-20250915.1.dev1.dist-info → great_expectations_cloud-20260120.0.dev0.dist-info/licenses}/LICENSE +0 -0

great_expectations_cloud/agent/actions/agent_action.py
@@ -4,7 +4,6 @@ import datetime
 from abc import abstractmethod
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Generic, Optional, TypeVar, Union
-from uuid import UUID
 
 from pydantic.v1 import BaseModel
 
@@ -12,6 +11,7 @@ from great_expectations_cloud.agent.models import (
     AgentBaseExtraForbid,
     AgentBaseExtraIgnore,
     CreatedResource,
+    DomainContext,
 )
 
 if TYPE_CHECKING:
@@ -32,11 +32,11 @@ _EventT = TypeVar("_EventT", bound=Union[AgentBaseExtraForbid, AgentBaseExtraIgnore])
 
 class AgentAction(Generic[_EventT]):
     def __init__(
-        self, context: CloudDataContext, base_url: str, organization_id: UUID, auth_key: str
+        self, context: CloudDataContext, base_url: str, domain_context: DomainContext, auth_key: str
     ):
         self._context = context
         self._base_url = base_url
-        self._organization_id = organization_id
+        self._domain_context = domain_context
         self._auth_key = auth_key
 
     @abstractmethod
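
Editor's note: every AgentAction subclass now receives a single DomainContext instead of a bare organization_id. A minimal sketch of the new construction pattern, assuming only what the diff above shows (the commented-out call site and URL are illustrative, not from this package):

    from uuid import uuid4

    from great_expectations_cloud.agent.models import DomainContext

    # Both IDs now travel together through every action:
    domain_context = DomainContext(organization_id=uuid4(), workspace_id=uuid4())

    # Hypothetical call site; any AgentAction subclass follows the same signature:
    # action = SomeAction(
    #     context=cloud_data_context,  # a CloudDataContext
    #     base_url="https://api.greatexpectations.io",  # illustrative URL
    #     domain_context=domain_context,
    #     auth_key=auth_key,
    # )
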

great_expectations_cloud/agent/actions/draft_datasource_config_action.py
@@ -40,9 +40,7 @@ class DraftDatasourceConfigAction(AgentAction[DraftDatasourceConfigEvent]):
     def check_draft_datasource_config(
         self, event: DraftDatasourceConfigEvent, id: str
     ) -> ActionResult:
-        draft_config = self.get_draft_config(
-            config_id=event.config_id, workspace_id=event.workspace_id
-        )
+        draft_config = self.get_draft_config(config_id=event.config_id)
         datasource_type = draft_config.get("type", None)
         if datasource_type is None:
             raise TypeError(  # noqa: TRY003 # one off error
@@ -60,9 +58,7 @@ class DraftDatasourceConfigAction(AgentAction[DraftDatasourceConfigEvent]):
 
         if isinstance(datasource, SQLDatasource):
             asset_names = get_asset_names(datasource)
-            self._update_asset_names_list(
-                config_id=event.config_id, workspace_id=event.workspace_id, asset_names=asset_names
-            )
+            self._update_asset_names_list(config_id=event.config_id, asset_names=asset_names)
 
         return ActionResult(
             id=id,
@@ -70,13 +66,11 @@ class DraftDatasourceConfigAction(AgentAction[DraftDatasourceConfigEvent]):
             created_resources=[],
         )
 
-    def _update_asset_names_list(
-        self, config_id: UUID, workspace_id: UUID, asset_names: list[str]
-    ) -> None:
+    def _update_asset_names_list(self, config_id: UUID, asset_names: list[str]) -> None:
         with create_session(access_token=self._auth_key) as session:
             url = urljoin(
                 base=self._base_url,
-                url=f"/api/v1/organizations/{self._organization_id}/workspaces/{workspace_id}/draft-table-names/{config_id}",
+                url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/draft-table-names/{config_id}",
             )
             response = session.put(
                 url=url,
@@ -90,10 +84,10 @@ class DraftDatasourceConfigAction(AgentAction[DraftDatasourceConfigEvent]):
                 f"={config_id}.",
             )
 
-    def get_draft_config(self, config_id: UUID, workspace_id: UUID) -> dict[str, Any]:
+    def get_draft_config(self, config_id: UUID) -> dict[str, Any]:
         resource_url = urljoin(
             base=self._base_url,
-            url=f"/api/v1/organizations/{self._organization_id}/workspaces/{workspace_id}/draft-datasources/{config_id}",
+            url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/draft-datasources/{config_id}",
         )
         with create_session(access_token=self._auth_key) as session:
             response = session.get(resource_url)

great_expectations_cloud/agent/actions/generate_data_quality_check_expectations_action.py
@@ -43,6 +43,7 @@ from great_expectations_cloud.agent.event_handler import register_event_action
 from great_expectations_cloud.agent.exceptions import GXAgentError
 from great_expectations_cloud.agent.models import (
     CreatedResource,
+    DomainContext,
     GenerateDataQualityCheckExpectationsEvent,
 )
 from great_expectations_cloud.agent.utils import (
@@ -83,13 +84,13 @@ class GenerateDataQualityCheckExpectationsAction(
         self,
         context: CloudDataContext,
         base_url: str,
-        organization_id: UUID,
+        domain_context: DomainContext,
         auth_key: str,
         metric_repository: MetricRepository | None = None,
         batch_inspector: BatchInspector | None = None,
     ):
         super().__init__(
-            context=context, base_url=base_url, organization_id=organization_id, auth_key=auth_key
+            context=context, base_url=base_url, domain_context=domain_context, auth_key=auth_key
         )
         self._metric_repository = metric_repository or MetricRepository(
             data_store=CloudDataStore(self._context)
@@ -100,8 +101,6 @@ class GenerateDataQualityCheckExpectationsAction(
 
     @override
     def run(self, event: GenerateDataQualityCheckExpectationsEvent, id: str) -> ActionResult:
-        self._workspace_id = event.workspace_id
-
         created_resources: list[CreatedResource] = []
         assets_with_errors: list[str] = []
         selected_dqis: Sequence[DataQualityIssues] = event.selected_data_quality_issues or []
@@ -206,7 +205,7 @@ class GenerateDataQualityCheckExpectationsAction(
             metric_list=[
                 MetricTypes.TABLE_COLUMNS,
                 MetricTypes.TABLE_COLUMN_TYPES,
-                MetricTypes.COLUMN_NULL_COUNT,
+                MetricTypes.COLUMN_NON_NULL_COUNT,
                 MetricTypes.TABLE_ROW_COUNT,
             ],
         )
@@ -225,7 +224,7 @@ class GenerateDataQualityCheckExpectationsAction(
         """
         url = urljoin(
             base=self._base_url,
-            url=f"/api/v1/organizations/{self._organization_id}/workspaces/{self._workspace_id}/expectations/",
+            url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/expectations/",
         )
         with create_session(access_token=self._auth_key) as session:
             response = session.get(
@@ -375,11 +374,11 @@ class GenerateDataQualityCheckExpectationsAction(
             metric
             for metric in metric_run.metrics
             if isinstance(metric, ColumnMetric)
-            and metric.metric_name == MetricTypes.COLUMN_NULL_COUNT
+            and metric.metric_name == MetricTypes.COLUMN_NON_NULL_COUNT
         ]
 
         if not column_null_values_metric or len(column_null_values_metric) == 0:
-            raise RuntimeError("missing COLUMN_NULL_COUNT metrics")  # noqa: TRY003
+            raise RuntimeError("missing COLUMN_NON_NULL_COUNT metrics")  # noqa: TRY003
 
         expectation_ids = []
         # Single-expectation approach using ExpectColumnProportionOfNonNullValuesToBeBetween
@@ -390,7 +389,7 @@ class GenerateDataQualityCheckExpectationsAction(
         )
         for column in columns_missing_completeness_coverage:
             column_name = column.column
-            null_count = column.value
+            non_null_count = column.value
             row_count = table_row_count.value
             expectation: gx_expectations.Expectation
 
@@ -400,7 +399,6 @@ class GenerateDataQualityCheckExpectationsAction(
             max_param_name = f"{unique_id}_proportion_max"
 
             # Calculate non-null proportion
-            non_null_count = row_count - null_count if row_count > 0 else 0
             non_null_proportion = non_null_count / row_count if row_count > 0 else 0
 
             if use_forecast:
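
Editor's note: the hunk above switches the completeness math from a derived null count to the metric's direct non-null count; the resulting proportion is identical when both counts are consistent. A quick worked example of the arithmetic:

    # Old derivation (from COLUMN_NULL_COUNT):   non_null = row_count - null_count
    # New source   (from COLUMN_NON_NULL_COUNT): non_null is read directly
    row_count = 1_000
    non_null_count = 950  # i.e. 50 null values

    non_null_proportion = non_null_count / row_count if row_count > 0 else 0
    assert non_null_proportion == 0.95  # feeds the min/max proportion parameters
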
@@ -527,7 +525,7 @@ class GenerateDataQualityCheckExpectationsAction(
     ) -> UUID:
         url = urljoin(
             base=self._base_url,
-            url=f"/api/v1/organizations/{self._organization_id}/workspaces/{self._workspace_id}/expectations/{asset_id}",
+            url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/expectations/{asset_id}",
         )
 
         expectation_payload = expectation.configuration.to_json_dict()

great_expectations_cloud/agent/actions/list_asset_names.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 from urllib.parse import urljoin
-from uuid import UUID
 
 from great_expectations.core.http import create_session
 from great_expectations.datasource.fluent import SQLDatasource
@@ -34,7 +33,6 @@ class ListAssetNamesAction(AgentAction[ListAssetNamesEvent]):
 
         self._add_or_update_asset_names_list(
             datasource_id=str(datasource.id),
-            workspace_id=event.workspace_id,
             asset_names=asset_names,
         )
 
@@ -44,13 +42,11 @@ class ListAssetNamesAction(AgentAction[ListAssetNamesEvent]):
             created_resources=[],
         )
 
-    def _add_or_update_asset_names_list(
-        self, datasource_id: str, workspace_id: UUID, asset_names: list[str]
-    ) -> None:
+    def _add_or_update_asset_names_list(self, datasource_id: str, asset_names: list[str]) -> None:
         with create_session(access_token=self._auth_key) as session:
             url = urljoin(
                 base=self._base_url,
-                url=f"/api/v1/organizations/{self._organization_id}/workspaces/{workspace_id}/table-names/{datasource_id}",
+                url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/table-names/{datasource_id}",
             )
             response = session.put(
                 url=url,

great_expectations_cloud/agent/actions/run_checkpoint.py
@@ -1,7 +1,9 @@
 from __future__ import annotations
 
+import logging
+import socket
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Final
 
 from typing_extensions import override
 
@@ -21,6 +23,8 @@ if TYPE_CHECKING:
     from great_expectations.data_context import CloudDataContext
     from great_expectations.datasource.fluent.interfaces import DataAsset, Datasource
 
+LOGGER: Final[logging.Logger] = logging.getLogger(__name__)
+
 
 class RunCheckpointAction(AgentAction[RunCheckpointEvent]):
     @override
@@ -45,12 +49,28 @@ def run_checkpoint(
     id: str,
     expectation_parameters: dict[str, Any] | None = None,
 ) -> ActionResult:
+    """Run a checkpoint and return the result."""
+    hostname = socket.gethostname()
+    log_extra = {
+        "correlation_id": id,
+        "checkpoint_name": event.checkpoint_name,
+        "hostname": hostname,
+    }
+
     # the checkpoint_name property on possible events is optional for backwards compatibility,
     # but this action requires it in order to run:
     if not event.checkpoint_name:
         raise MissingCheckpointNameError
 
+    LOGGER.debug("Fetching checkpoint from context", extra=log_extra)
     checkpoint = context.checkpoints.get(name=event.checkpoint_name)
+    LOGGER.debug(
+        "Checkpoint fetched successfully",
+        extra={
+            **log_extra,
+            "validation_definitions_count": len(checkpoint.validation_definitions),
+        },
+    )
 
     # only GX-managed Checkpoints are currently validated here and they contain only one validation definition, but
     # the Checkpoint does allow for multiple validation definitions so we'll be defensive and ensure we only test each
@@ -66,15 +86,48 @@ def run_checkpoint(
             )
         data_sources_assets_by_data_source_name[ds_name].assets_by_name[vd.asset.name] = vd.asset
 
-    for data_sources_assets in data_sources_assets_by_data_source_name.values():
+    # Test connections to all datasources and assets
+    for ds_name, data_sources_assets in data_sources_assets_by_data_source_name.items():
         data_source = data_sources_assets.data_source
+        LOGGER.debug(
+            "Testing datasource connection",
+            extra={**log_extra, "datasource_name": ds_name},
+        )
         data_source.test_connection(test_assets=False)  # raises `TestConnectionError` on failure
-        for data_asset in data_sources_assets.assets_by_name.values():
+        LOGGER.debug(
+            "Datasource connection successful",
+            extra={**log_extra, "datasource_name": ds_name},
+        )
+
+        for asset_name, data_asset in data_sources_assets.assets_by_name.items():
+            LOGGER.debug(
+                "Testing data asset connection",
+                extra={**log_extra, "datasource_name": ds_name, "asset_name": asset_name},
+            )
             data_asset.test_connection()  # raises `TestConnectionError` on failure
+            LOGGER.debug(
+                "Data asset connection successful",
+                extra={**log_extra, "datasource_name": ds_name, "asset_name": asset_name},
+            )
 
+    LOGGER.debug(
+        "Running checkpoint",
+        extra={
+            **log_extra,
+            "datasources_count": len(data_sources_assets_by_data_source_name),
+            "has_expectation_parameters": expectation_parameters is not None,
+        },
+    )
     checkpoint_run_result = checkpoint.run(
         batch_parameters=event.splitter_options, expectation_parameters=expectation_parameters
     )
+    LOGGER.debug(
+        "Checkpoint run completed",
+        extra={
+            **log_extra,
+            "run_results_count": len(checkpoint_run_result.run_results),
+        },
+    )
 
     validation_results = checkpoint_run_result.run_results
     created_resources = []
@@ -88,6 +141,14 @@ def run_checkpoint(
         )
         created_resources.append(created_resource)
 
+    LOGGER.debug(
+        "Checkpoint action completed successfully",
+        extra={
+            **log_extra,
+            "created_resources_count": len(created_resources),
+        },
+    )
+
     return ActionResult(
         id=id,
         type=event.type,
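
Editor's note: the log_extra dict above leans on standard logging behavior: keys passed via extra= become attributes on the LogRecord, where a structured formatter can pick them up. A stdlib-only sketch of that mechanism (the formatter below is illustrative, not this package's actual logging config):

    import json
    import logging


    class JsonExtraFormatter(logging.Formatter):
        """Illustrative formatter that emits selected extra fields as JSON."""

        FIELDS = ("correlation_id", "checkpoint_name", "hostname")

        def format(self, record: logging.LogRecord) -> str:
            payload = {"message": record.getMessage()}
            for field in self.FIELDS:
                if hasattr(record, field):
                    payload[field] = getattr(record, field)
            return json.dumps(payload)


    handler = logging.StreamHandler()
    handler.setFormatter(JsonExtraFormatter())
    logger = logging.getLogger("demo")
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    # The extra keys land on the record and appear in the JSON output:
    logger.debug("Fetching checkpoint from context", extra={"correlation_id": "abc-123"})
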

great_expectations_cloud/agent/actions/run_metric_list_action.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 from typing import TYPE_CHECKING
-from uuid import UUID
 
 from great_expectations.experimental.metric_repository.batch_inspector import (
     BatchInspector,
@@ -21,6 +20,7 @@ from great_expectations_cloud.agent.actions import ActionResult, AgentAction
 from great_expectations_cloud.agent.event_handler import register_event_action
 from great_expectations_cloud.agent.models import (
     CreatedResource,
+    DomainContext,
     RunMetricsListEvent,
 )
 
@@ -34,13 +34,13 @@ class MetricListAction(AgentAction[RunMetricsListEvent]):
         self,
         context: CloudDataContext,
         base_url: str,
-        organization_id: UUID,
+        domain_context: DomainContext,
         auth_key: str,
         metric_repository: MetricRepository | None = None,
         batch_inspector: BatchInspector | None = None,
     ):
         super().__init__(
-            context=context, base_url=base_url, organization_id=organization_id, auth_key=auth_key
+            context=context, base_url=base_url, domain_context=domain_context, auth_key=auth_key
        )
         self._metric_repository = metric_repository or MetricRepository(
             data_store=CloudDataStore(self._context)

great_expectations_cloud/agent/actions/run_scheduled_checkpoint.py
@@ -1,6 +1,8 @@
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
+import logging
+import socket
+from typing import TYPE_CHECKING, Final
 from urllib.parse import urljoin
 
 from great_expectations.core.http import create_session
@@ -18,13 +20,15 @@ from great_expectations_cloud.agent.models import RunScheduledCheckpointEvent
 if TYPE_CHECKING:
     from great_expectations.data_context import CloudDataContext
 
+LOGGER: Final[logging.Logger] = logging.getLogger(__name__)
+
 
 class RunScheduledCheckpointAction(AgentAction[RunScheduledCheckpointEvent]):
     @override
     def run(self, event: RunScheduledCheckpointEvent, id: str) -> ActionResult:
         expectation_parameters_url = urljoin(
             base=self._base_url,
-            url=f"/api/v1/organizations/{self._organization_id}/workspaces/{event.workspace_id}/checkpoints/{event.checkpoint_id}/expectation-parameters",
+            url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/checkpoints/{event.checkpoint_id}/expectation-parameters",
         )
         return run_scheduled_checkpoint(
             context=self._context,
@@ -38,16 +42,31 @@ class RunScheduledCheckpointAction(AgentAction[RunScheduledCheckpointEvent]):
 def run_scheduled_checkpoint(
     context: CloudDataContext, event: RunScheduledCheckpointEvent, id: str, auth_key: str, url: str
 ) -> ActionResult:
+    """Run a scheduled checkpoint, fetching expectation parameters from GX Cloud first."""
+    hostname = socket.gethostname()
+    log_extra = {
+        "correlation_id": id,
+        "checkpoint_id": str(event.checkpoint_id),
+        "schedule_id": str(event.schedule_id),
+        "hostname": hostname,
+    }
+
+    LOGGER.debug("Fetching expectation parameters from GX Cloud", extra=log_extra)
     with create_session(access_token=auth_key) as session:
         response = session.get(url=url)
 
     if not response.ok:
+        LOGGER.error(
+            "Failed to fetch expectation parameters",
+            extra={**log_extra, "response_status": response.status_code},
+        )
         raise GXCloudError(
             message=f"RunScheduledCheckpointAction encountered an error while connecting to GX Cloud. "
            f"Unable to retrieve expectation_parameters for Checkpoint with ID={event.checkpoint_id}.",
             response=response,
         )
     data = response.json()
+    LOGGER.debug("Expectation parameters fetched successfully", extra=log_extra)
 
     try:
         expectation_parameters = (
@@ -56,11 +75,16 @@ def run_scheduled_checkpoint(
             else None
         )
     except KeyError as e:
+        LOGGER.exception("Malformed response from GX Cloud", extra=log_extra)
         raise GXCloudError(
             message="Malformed response received from GX Cloud",
             response=response,
         ) from e
 
+    LOGGER.debug(
+        "Proceeding to run checkpoint",
+        extra={**log_extra, "has_expectation_parameters": expectation_parameters is not None},
+    )
     return run_checkpoint(context, event, id, expectation_parameters=expectation_parameters)
 
 

great_expectations_cloud/agent/actions/run_window_checkpoint.py
@@ -24,7 +24,7 @@ class RunWindowCheckpointAction(AgentAction[RunWindowCheckpointEvent]):
     def run(self, event: RunWindowCheckpointEvent, id: str) -> ActionResult:
         expectation_parameters_url = urljoin(
             base=self._base_url,
-            url=f"/api/v1/organizations/{self._organization_id}/workspaces/{event.workspace_id}/checkpoints/{event.checkpoint_id}/expectation-parameters",
+            url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/checkpoints/{event.checkpoint_id}/expectation-parameters",
         )
         return run_window_checkpoint(
             self._context,

great_expectations_cloud/agent/agent.py
@@ -3,13 +3,19 @@ from __future__ import annotations
 import asyncio
 import logging
 import os
+import resource
 import signal
+import socket
+import sys
+import threading
+import time
 import traceback
 import warnings
 from collections import defaultdict
 from concurrent.futures import Future
 from concurrent.futures.thread import ThreadPoolExecutor
 from functools import partial
+from http import HTTPStatus
 from importlib.metadata import version as metadata_version
 from typing import TYPE_CHECKING, Any, Callable, Final, Literal
 from urllib.parse import urljoin, urlparse
@@ -65,6 +71,7 @@ from great_expectations_cloud.agent.models import (
     AgentBaseExtraForbid,
     CreateScheduledJobAndSetJobStarted,
     CreateScheduledJobAndSetJobStartedRequest,
+    DomainContext,
     JobCompleted,
     JobStarted,
     JobStatus,
@@ -136,6 +143,9 @@ class GXAgent:
     _PYPI_GX_AGENT_PACKAGE_NAME = "great_expectations_cloud"
     _PYPI_GREAT_EXPECTATIONS_PACKAGE_NAME = "great_expectations"
 
+    # Heartbeat interval in seconds (log progress every 60 seconds during job processing)
+    _HEARTBEAT_INTERVAL_SECONDS = 60
+
     def __init__(self: Self):
         self._config = self._create_config()
 
@@ -159,6 +169,15 @@ class GXAgent:
         self._correlation_ids: defaultdict[str, int] = defaultdict(lambda: 0)
         self._listen_tries = 0
 
+        # Heartbeat tracking
+        self._heartbeat_stop_event: threading.Event | None = None
+        self._heartbeat_thread: threading.Thread | None = None
+        self._current_job_correlation_id: str | None = None
+        self._current_job_start_time: float | None = None
+
+        # Install signal handlers for graceful shutdown logging
+        self._install_signal_handlers()
+
     def run(self) -> None:
         """Open a connection to GX Cloud."""
 
@@ -218,6 +237,98 @@ class GXAgent:
         if subscriber is not None:
             subscriber.close()
 
+    def _install_signal_handlers(self) -> None:
+        """Install signal handlers to log when the process receives shutdown signals."""
+        original_sigterm = signal.getsignal(signal.SIGTERM)
+        original_sigint = signal.getsignal(signal.SIGINT)
+
+        def sigterm_handler(signum: int, frame: Any) -> None:
+            self._log_signal_received("SIGTERM", signum)
+            if callable(original_sigterm):
+                original_sigterm(signum, frame)
+            elif original_sigterm == signal.SIG_DFL:
+                raise SystemExit(128 + signum)
+
+        def sigint_handler(signum: int, frame: Any) -> None:
+            self._log_signal_received("SIGINT", signum)
+            if callable(original_sigint):
+                original_sigint(signum, frame)
+            elif original_sigint == signal.SIG_DFL:
+                raise KeyboardInterrupt
+
+        signal.signal(signal.SIGTERM, sigterm_handler)
+        signal.signal(signal.SIGINT, sigint_handler)
+
+    def _log_signal_received(self, signal_name: str, signum: int) -> None:
+        """Log when a shutdown signal is received, including current job info."""
+        memory_mb = self._get_memory_usage_mb()
+        LOGGER.warning(
+            f"Received {signal_name} signal - shutting down",
+            extra={
+                "signal": signal_name,
+                "signal_number": signum,
+                "hostname": socket.gethostname(),
+                "current_job_correlation_id": self._current_job_correlation_id,
+                "job_elapsed_seconds": (
+                    time.time() - self._current_job_start_time
+                    if self._current_job_start_time
+                    else None
+                ),
+                "memory_usage_mb": memory_mb,
+                "has_active_task": self._current_task is not None and not self._current_task.done(),
+            },
+        )
+
+    def _get_memory_usage_mb(self) -> float:
+        """Get current memory usage in MB using the resource module."""
+        usage = resource.getrusage(resource.RUSAGE_SELF)
+        # On macOS, ru_maxrss is in bytes; on Linux, it's in KB
+        if sys.platform == "darwin":
+            return usage.ru_maxrss / (1024 * 1024)
+        return usage.ru_maxrss / 1024
+
+    def _start_heartbeat(self, correlation_id: str, org_id: UUID, workspace_id: UUID) -> None:
+        """Start a background thread that logs periodic heartbeats during job processing."""
+        self._current_job_correlation_id = correlation_id
+        self._current_job_start_time = time.time()
+        self._heartbeat_stop_event = threading.Event()
+
+        def heartbeat_loop() -> None:
+            stop_event = self._heartbeat_stop_event
+            if stop_event is None:
+                return
+            while not stop_event.wait(timeout=self._HEARTBEAT_INTERVAL_SECONDS):
+                if stop_event.is_set():
+                    break
+                elapsed = time.time() - (self._current_job_start_time or time.time())
+                memory_mb = self._get_memory_usage_mb()
+                LOGGER.debug(
+                    "job.heartbeat",
+                    extra={
+                        "correlation_id": correlation_id,
+                        "organization_id": str(org_id),
+                        "workspace_id": str(workspace_id),
+                        "hostname": socket.gethostname(),
+                        "elapsed_seconds": round(elapsed, 1),
+                        "memory_usage_mb": round(memory_mb, 1),
+                    },
+                )
+
+        self._heartbeat_thread = threading.Thread(target=heartbeat_loop, daemon=True)
+        self._heartbeat_thread.start()
+
+    def _stop_heartbeat(self) -> None:
+        """Stop the heartbeat thread."""
+        if self._heartbeat_stop_event:
+            self._heartbeat_stop_event.set()
+        if self._heartbeat_thread and self._heartbeat_thread.is_alive():
+            self._heartbeat_thread.join(timeout=2)
+        self._heartbeat_thread = None
+        self._heartbeat_stop_event = None
+        self._current_job_correlation_id = None
+        self._current_job_start_time = None
+
     @classmethod
     def get_current_gx_agent_version(cls) -> str:
         version: str = metadata_version(cls._PYPI_GX_AGENT_PACKAGE_NAME)
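
Editor's note: the heartbeat thread added above uses threading.Event.wait(timeout=...) as an interruptible sleep, so _stop_heartbeat() wakes the thread immediately instead of waiting out the interval. The same pattern in isolation:

    import threading
    import time

    stop_event = threading.Event()
    INTERVAL_SECONDS = 1  # the agent uses 60


    def heartbeat_loop() -> None:
        # wait() returns False on timeout (keep looping) and True once set (stop).
        while not stop_event.wait(timeout=INTERVAL_SECONDS):
            print(f"heartbeat at {time.time():.0f}")


    thread = threading.Thread(target=heartbeat_loop, daemon=True)
    thread.start()
    time.sleep(3.5)   # roughly three heartbeats
    stop_event.set()  # wakes the thread immediately and ends the loop
    thread.join(timeout=2)
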
@@ -237,8 +348,26 @@ class GXAgent:
         Args:
             event_context: An Event with related properties and actions.
         """
+        # Track how many times this correlation_id has been seen BY THIS POD (for local diagnostics)
+        # Note: event_context.redelivered is set by RabbitMQ and indicates cross-pod redelivery
+        local_delivery_count = self._correlation_ids.get(event_context.correlation_id, 0)
+
         if self._reject_correlation_id(event_context.correlation_id) is True:
-            # this event has been redelivered too many times - remove it from circulation
+            # this event has been redelivered too many times to THIS pod - remove it from circulation
+            LOGGER.error(
+                "Message redelivered too many times to this pod, removing from queue",
+                extra={
+                    "event_type": event_context.event.type,
+                    "correlation_id": event_context.correlation_id,
+                    "organization_id": self.get_organization_id(event_context),
+                    "workspace_id": str(self.get_workspace_id(event_context)),
+                    "schedule_id": event_context.event.schedule_id
+                    if isinstance(event_context.event, ScheduledEventBase)
+                    else None,
+                    "local_delivery_count": local_delivery_count,
+                    "redelivered": event_context.redelivered,
+                },
+            )
             event_context.processed_with_failures()
             return
         elif self._can_accept_new_task() is not True:
@@ -252,6 +381,7 @@ class GXAgent:
                     "schedule_id": event_context.event.schedule_id
                     if isinstance(event_context.event, ScheduledEventBase)
                     else None,
+                    "redelivered": event_context.redelivered,
                 },
             )
             # request that this message is redelivered later
@@ -260,6 +390,20 @@ class GXAgent:
             self._redeliver_msg_task = loop.create_task(event_context.redeliver_message())
             return
 
+        if event_context.redelivered:
+            LOGGER.warning(
+                "rabbitmq.message.redelivered",
+                extra={
+                    "event_type": event_context.event.type,
+                    "correlation_id": event_context.correlation_id,
+                    "organization_id": self.get_organization_id(event_context),
+                    "workspace_id": str(self.get_workspace_id(event_context)),
+                    "schedule_id": event_context.event.schedule_id
+                    if isinstance(event_context.event, ScheduledEventBase)
+                    else None,
+                },
+            )
+
         self._current_task = self._executor.submit(
             self._handle_event,
             event_context=event_context,
@@ -341,8 +485,9 @@ class GXAgent:
             org_id=org_id,
             workspace_id=workspace_id,
         )
+        memory_mb = self._get_memory_usage_mb()
         LOGGER.info(
-            "Starting job",
+            "job.started",
             extra={
                 "event_type": event_context.event.type,
                 "correlation_id": event_context.correlation_id,
@@ -351,9 +496,14 @@ class GXAgent:
                 "schedule_id": event_context.event.schedule_id
                 if isinstance(event_context.event, ScheduledEventBase)
                 else None,
+                "hostname": socket.gethostname(),
+                "redelivered": event_context.redelivered,
+                "memory_usage_mb": round(memory_mb, 1),
             },
         )
 
+        self._start_heartbeat(event_context.correlation_id, org_id, workspace_id)
+
         self._set_sentry_tags(event_context)
 
         handler = EventHandler(context=data_context)
@@ -363,7 +513,7 @@ class GXAgent:
             id=event_context.correlation_id,
             base_url=base_url,
             auth_key=auth_key,
-            organization_id=org_id,
+            domain_context=DomainContext(organization_id=org_id, workspace_id=workspace_id),
         )
         return result
 
@@ -378,11 +528,30 @@ class GXAgent:
         """
         # warning: this method will not be executed in the main thread
 
+        # Calculate job duration before stopping heartbeat (which clears start time)
+        job_elapsed_time = (
+            time.time() - self._current_job_start_time if self._current_job_start_time else None
+        )
+        self._stop_heartbeat()
+
         org_id = self.get_organization_id(event_context)
         workspace_id = self.get_workspace_id(event_context)
 
+        memory_mb = self._get_memory_usage_mb()
+        LOGGER.debug(
+            "job.thread_exiting",
+            extra={
+                "correlation_id": event_context.correlation_id,
+                "hostname": socket.gethostname(),
+                "has_exception": future.exception() is not None,
+                "cancelled": future.cancelled(),
+                "memory_usage_mb": round(memory_mb, 1),
+            },
+        )
+
         # get results or errors from the thread
         error = future.exception()
+
         if error is None:
             result: ActionResult = future.result()
 
@@ -393,16 +562,21 @@ class GXAgent:
                 error_stack_trace="The version of the GX Agent you are using does not support this functionality. Please upgrade to the most recent image tagged with `stable`.",
                 processed_by=self._get_processed_by(),
             )
-            LOGGER.error(
-                "Job completed with error. Ensure agent is up-to-date.",
+            LOGGER.warning(
+                "job.completed",
                 extra={
                     "event_type": event_context.event.type,
-                    "id": event_context.correlation_id,
+                    "correlation_id": event_context.correlation_id,
+                    "job_duration": job_elapsed_time,
+                    "success": False,
                     "organization_id": str(org_id),
                     "workspace_id": str(workspace_id),
                     "schedule_id": event_context.event.schedule_id
                     if isinstance(event_context.event, ScheduledEventBase)
                     else None,
+                    "hostname": socket.gethostname(),
+                    "error_type": "UnknownEvent",
+                    "error_message": "Agent does not support this event type. Upgrade required.",
                 },
             )
@@ -412,30 +586,40 @@ class GXAgent:
                 processed_by=self._get_processed_by(),
             )
             LOGGER.info(
-                "Completed job",
+                "job.completed",
                 extra={
                     "event_type": event_context.event.type,
                     "correlation_id": event_context.correlation_id,
                     "job_duration": (
                         result.job_duration.total_seconds() if result.job_duration else None
                     ),
+                    "success": True,
                     "organization_id": str(org_id),
                     "workspace_id": str(workspace_id),
                     "schedule_id": event_context.event.schedule_id
                     if isinstance(event_context.event, ScheduledEventBase)
                     else None,
+                    "hostname": socket.gethostname(),
                 },
             )
         else:
-            status = build_failed_job_completed_status(error)
+            status = build_failed_job_completed_status(error, processed_by=self._get_processed_by())
             LOGGER.info(traceback.format_exc())
-            LOGGER.info(
-                "Job completed with error",
+            LOGGER.warning(
+                "job.completed",
                 extra={
                     "event_type": event_context.event.type,
                     "correlation_id": event_context.correlation_id,
+                    "job_duration": job_elapsed_time,
+                    "success": False,
                     "organization_id": str(org_id),
                     "workspace_id": str(workspace_id),
+                    "schedule_id": event_context.event.schedule_id
+                    if isinstance(event_context.event, ScheduledEventBase)
+                    else None,
+                    "hostname": socket.gethostname(),
+                    "error_type": type(error).__name__,
+                    "error_message": str(error)[:500],  # Truncate to avoid huge logs
                 },
             )
 
@@ -655,6 +839,31 @@ class GXAgent:
         with create_session(access_token=self.get_auth_key()) as session:
             payload = CreateScheduledJobAndSetJobStartedRequest(data=data).json()
             response = session.post(agent_sessions_url, data=payload)
+
+            if response.status_code == HTTPStatus.BAD_REQUEST:
+                try:
+                    response_body = response.json()
+                except Exception:
+                    response_body = response.text
+                LOGGER.warning(
+                    "Job already exists - this message was likely redelivered by RabbitMQ "
+                    "after another runner already claimed it. Continuing to process anyway "
+                    "as a safety measure in case the original runner failed.",
+                    extra={
+                        "correlation_id": str(event_context.correlation_id),
+                        "event_type": str(event_context.event.type),
+                        "organization_id": str(org_id),
+                        "schedule_id": str(event_context.event.schedule_id),
+                        "workspace_id": str(workspace_id),
+                        "response_status": response.status_code,
+                        "response_body": response_body,
+                    },
+                )
+                # Note: We intentionally continue processing instead of NACKing.
+                # This ensures job completion even if the first runner fails.
+                # TODO: Once we add inProgress timeout in Mercury, we can
+                # safely NACK here to prevent duplicate processing.
+
         LOGGER.info(
             "Created scheduled job and set started",
             extra={
@@ -663,6 +872,7 @@ class GXAgent:
                 "organization_id": str(org_id),
                 "schedule_id": str(event_context.event.schedule_id),
                 "workspace_id": str(workspace_id),
+                "response_status": response.status_code,
             },
         )
         GXAgent._log_http_error(
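
Editor's note: the HTTP 400 branch above makes scheduled-job creation effectively idempotent under RabbitMQ redelivery. A reduced sketch of that decision rule (the function name is hypothetical):

    from http import HTTPStatus


    def job_already_claimed(status_code: int) -> bool:
        """True when the backend reports the scheduled job was already created."""
        # The agent treats 400 as "another runner claimed this job" and keeps
        # processing instead of NACKing, trading possible duplicate work for
        # guaranteed completion if the first runner died mid-flight.
        return status_code == HTTPStatus.BAD_REQUEST


    assert job_already_claimed(400)
    assert not job_already_claimed(201)
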

great_expectations_cloud/agent/event_handler.py
@@ -15,6 +15,7 @@ from pydantic import v1 as pydantic_v1
 from great_expectations_cloud.agent.actions.unknown import UnknownEventAction
 from great_expectations_cloud.agent.exceptions import GXAgentError
 from great_expectations_cloud.agent.models import (
+    DomainContext,
     Event,
     EventType,
     UnknownEvent,
@@ -67,11 +68,11 @@ class EventHandler:
         self._context = context
 
     def get_event_action(
-        self, event: Event, base_url: str, auth_key: str, organization_id: UUID
+        self, event: Event, base_url: str, auth_key: str, domain_context: DomainContext
     ) -> AgentAction[Any]:
         """Get the action that should be run for the given event."""
 
-        if not self._check_event_organization_id(event, organization_id):
+        if not self._check_event_organization_id(event, domain_context.organization_id):
             # Making message more generic
             raise GXAgentError("Unable to process job. Invalid input.")  # noqa: TRY003
 
@@ -84,17 +85,17 @@ class EventHandler:
         return action_class(
             context=self._context,
             base_url=base_url,
-            organization_id=organization_id,
+            domain_context=domain_context,
             auth_key=auth_key,
         )
 
-    def handle_event(  # Refactor opportunity
-        self, event: Event, id: str, base_url: str, auth_key: str, organization_id: UUID
+    def handle_event(
+        self, event: Event, id: str, base_url: str, auth_key: str, domain_context: DomainContext
     ) -> ActionResult:
-        start_time = datetime.now(tz=timezone.utc)
         """Transform an Event into an ActionResult."""
+        start_time = datetime.now(tz=timezone.utc)
         action = self.get_event_action(
-            event=event, base_url=base_url, auth_key=auth_key, organization_id=organization_id
+            event=event, base_url=base_url, auth_key=auth_key, domain_context=domain_context
         )
         LOGGER.info(f"Handling event: {event.type} -> {action.__class__.__name__}")
         action_result = action.run(event=event, id=id)

great_expectations_cloud/agent/message_service/asyncio_rabbit_mq_client.py
@@ -27,6 +27,7 @@ class OnMessagePayload:
     correlation_id: str
     delivery_tag: int
     body: bytes
+    redelivered: bool = False  # Set by RabbitMQ when message is redelivered
 
 
 class OnMessageFn(Protocol):
@@ -174,8 +175,12 @@ class AsyncRabbitMQClient:
         # param on_message is provided by the caller as an argument to AsyncRabbitMQClient.run
         correlation_id = header_frame.correlation_id
         delivery_tag = method_frame.delivery_tag
+        redelivered = method_frame.redelivered  # RabbitMQ sets this flag on redelivery
         payload = OnMessagePayload(
-            correlation_id=correlation_id, delivery_tag=delivery_tag, body=body
+            correlation_id=correlation_id,
+            delivery_tag=delivery_tag,
+            body=body,
+            redelivered=redelivered,
         )
         return on_message(payload)
 
@@ -190,10 +195,13 @@ class AsyncRabbitMQClient:
     def _on_consumer_canceled(self, method_frame: Basic.Cancel) -> None:
         """Callback invoked when the broker cancels the client's connection."""
         if self._channel is not None:
-            LOGGER.info(
-                "Consumer was cancelled remotely, shutting down",
+            LOGGER.warning(
+                "rabbitmq.consumer.cancelled",
                 extra={
-                    "method_frame": method_frame,
+                    "consumer_tag": method_frame.consumer_tag
+                    if hasattr(method_frame, "consumer_tag")
+                    else None,
+                    "was_consuming": self.was_consuming,
                 },
             )
             self._channel.close()
@@ -232,11 +240,31 @@ class AsyncRabbitMQClient:
             self._reconnect()
         self._log_pika_exception("Connection open failed", reason)
 
-    def _on_connection_closed(
-        self, connection: AsyncioConnection, _unused_reason: pika.Exception
-    ) -> None:
+    def _on_connection_closed(self, connection: AsyncioConnection, reason: pika.Exception) -> None:
         """Callback invoked after the broker closes the connection"""
-        LOGGER.debug("Connection to RabbitMQ has been closed")
+        # Use DEBUG for expected closes, WARNING for unexpected
+        log_level = LOGGER.debug if self._closing else LOGGER.warning
+        if isinstance(reason, (ConnectionClosed, ChannelClosed)):
+            log_level(
+                "rabbitmq.connection.closed",
+                extra={
+                    "reply_code": reason.reply_code,
+                    "reply_text": reason.reply_text,
+                    "was_consuming": self.was_consuming,
+                    "is_closing": self._closing,
+                },
+            )
+        else:
+            log_level(
+                "rabbitmq.connection.closed",
+                extra={
+                    "reply_code": 0,  # Unknown/non-AMQP error
+                    "reply_text": str(reason),
+                    "reason_type": type(reason).__name__,
+                    "was_consuming": self.was_consuming,
+                    "is_closing": self._closing,
+                },
+            )
         self._channel = None
         self._is_unrecoverable = True
         if self._closing:
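
Editor's note: the redelivered flag read from method_frame is standard AMQP delivery metadata that the broker sets on any attempt after the first. A minimal pika consumer showing where the flag lives (queue name is illustrative; the agent itself uses pika's asyncio adapter rather than BlockingConnection):

    import pika

    # Illustrative blocking consumer for demonstration purposes only.
    connection = pika.BlockingConnection(pika.ConnectionParameters("localhost"))
    channel = connection.channel()
    channel.queue_declare(queue="demo-queue")


    def on_message(ch, method, properties, body):
        # method.redelivered is True when the broker re-delivers an
        # unacknowledged message (e.g., after a consumer crashed).
        if method.redelivered:
            print(f"redelivered message, tag={method.delivery_tag}")
        ch.basic_ack(delivery_tag=method.delivery_tag)


    channel.basic_consume(queue="demo-queue", on_message_callback=on_message)
    channel.start_consuming()
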

great_expectations_cloud/agent/message_service/subscriber.py
@@ -37,6 +37,8 @@ class EventContext:
         can be removed from the queue.
     redeliver_message: async callable to signal that the broker should
         try to deliver this message again.
+    redelivered: True if RabbitMQ is redelivering this message (another
+        consumer failed to acknowledge it).
     """
 
     event: Event
@@ -44,6 +46,7 @@ class EventContext:
     processed_successfully: Callable[[], None]
     processed_with_failures: Callable[[], None]
     redeliver_message: Callable[[], Coroutine[OnMessageCallback, None, None]]
+    redelivered: bool = False
 
 
 class OnMessageCallback(Protocol):
@@ -142,6 +145,7 @@ class Subscriber:
             processed_successfully=ack_callback,
             processed_with_failures=nack_callback,
             redeliver_message=redeliver_message,
+            redelivered=payload.redelivered,
        )
 
         return on_message(event_context)

great_expectations_cloud/agent/models.py
@@ -24,6 +24,18 @@ def all_subclasses(cls: type) -> list[type]:
     return all_sub_cls
 
 
+class DomainContext(BaseModel):
+    """
+    Encapsulates domain-related context information.
+
+    This model consolidates organization_id and workspace_id to reduce
+    parameter proliferation and improve code maintainability.
+    """
+
+    organization_id: UUID
+    workspace_id: UUID
+
+
 class AgentBaseExtraForbid(BaseModel):
     class Config:
         # 2024-03-04: ZEL-501 Strictly enforce models for handling outdated APIs
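
Editor's note: because DomainContext is a pydantic v1 model with UUID fields, construction validates and coerces its inputs. A quick sketch of that behavior, mirroring the model defined above:

    from uuid import UUID

    from pydantic.v1 import BaseModel


    class DomainContext(BaseModel):  # mirrors the model added in this release
        organization_id: UUID
        workspace_id: UUID


    ctx = DomainContext(
        organization_id="12345678-1234-5678-1234-567812345678",  # str is coerced to UUID
        workspace_id=UUID("87654321-4321-8765-4321-876543218765"),
    )
    assert isinstance(ctx.organization_id, UUID)
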
@@ -277,15 +289,23 @@ class CreateScheduledJobAndSetJobStartedRequest(AgentBaseExtraForbid):
     data: CreateScheduledJobAndSetJobStarted
 
 
-def build_failed_job_completed_status(error: BaseException) -> JobCompleted:
+def build_failed_job_completed_status(
+    error: BaseException,
+    processed_by: Literal["agent", "runner"] | None = None,
+) -> JobCompleted:
     if isinstance(error, GXCoreError):
         status = JobCompleted(
             success=False,
             error_stack_trace=str(error),
             error_code=error.error_code,
             error_params=error.get_error_params(),
+            processed_by=processed_by,
         )
     else:
-        status = JobCompleted(success=False, error_stack_trace=str(error))
+        status = JobCompleted(
+            success=False,
+            error_stack_trace=str(error),
+            processed_by=processed_by,
+        )
 
     return status

great_expectations_cloud-20250915.1.dev1.dist-info/METADATA → great_expectations_cloud-20260120.0.dev0.dist-info/METADATA
@@ -1,22 +1,22 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.4
 Name: great_expectations_cloud
-Version: 20250915.1.dev1
+Version: 20260120.0.dev0
 Summary: Great Expectations Cloud
 License: Proprietary
+License-File: LICENSE
 Author: The Great Expectations Team
 Author-email: team@greatexpectations.io
-Requires-Python: >=3.11,<3.12
+Requires-Python: >=3.11.4,<3.12
 Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: Science/Research
 Classifier: License :: Other/Proprietary License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: Scientific/Engineering
 Classifier: Topic :: Scientific/Engineering :: Information Analysis
 Classifier: Topic :: Software Development :: Quality Assurance
 Classifier: Topic :: Software Development :: Testing
-Requires-Dist: great-expectations[databricks,gx-redshift,mssql,postgresql,snowflake,trino] (==1.6.0)
+Requires-Dist: great-expectations[databricks,gx-redshift,mssql,postgresql,snowflake,trino] (==1.11.1)
 Requires-Dist: orjson (>=3.9.7,<4.0.0,!=3.9.10)
 Requires-Dist: packaging (>=21.3,<26.0)
 Requires-Dist: pika (>=1.3.1,<2.0.0)

great_expectations_cloud-20250915.1.dev1.dist-info/RECORD → great_expectations_cloud-20260120.0.dev0.dist-info/RECORD
@@ -1,34 +1,34 @@
 great_expectations_cloud/__init__.py,sha256=1mr5RDyA2N38eynvEfVbuYIbjFadeJfqZ-X9CrqYiVo,150
 great_expectations_cloud/agent/__init__.py,sha256=FqDFYbGefmNhwlvJwJbNovkwzny6mwaYH5LtTi6VlSU,464
 great_expectations_cloud/agent/actions/__init__.py,sha256=TYPe2j8EgaziXXgSLEdgjnbHKL56O6cQL1kjPnGbRFI,949
-great_expectations_cloud/agent/actions/agent_action.py,sha256=mGAovvT4CDaQlZaeNcz5tqfMIt6cul2anzxPwFrSxeA,1186
-great_expectations_cloud/agent/actions/draft_datasource_config_action.py,sha256=eFiv3Mm7IavSvag8ctD6yooIT8b4n1LQFsKrRzX-lMM,5341
-great_expectations_cloud/agent/actions/generate_data_quality_check_expectations_action.py,sha256=WBAgt0AvJ7T0AqdLZqYsaj-afj4J8K4jSa6u_9TXka0,23635
-great_expectations_cloud/agent/actions/list_asset_names.py,sha256=KNuYgV7fPmL5yTq3lKgEenHBZHoi4h6QMAmbkmHulNQ,2641
-great_expectations_cloud/agent/actions/run_checkpoint.py,sha256=N2d07JDCG06kMve7yjPZQFlaGKoJw5dCbpKuBWw6Ssg,3751
-great_expectations_cloud/agent/actions/run_metric_list_action.py,sha256=tW64pNYJElkUmcd9baKllTSyR-NYyU-40QvMwSwwLqQ,3234
-great_expectations_cloud/agent/actions/run_scheduled_checkpoint.py,sha256=_7IYfMhIBlXzanvl627e83c79ecDlk_bMQAQaQHhdx8,2427
-great_expectations_cloud/agent/actions/run_window_checkpoint.py,sha256=_pseC6tytSiAXsqJVf0Jjirpga8I1IAPEWazPWZ8iJg,2287
+great_expectations_cloud/agent/actions/agent_action.py,sha256=F9zOgVmNJ_V2RhRbDXNMjd46-QVNFN8Lp_bmPvh5cwU,1189
+great_expectations_cloud/agent/actions/draft_datasource_config_action.py,sha256=NVN2GBSty-XvCW7pHDkLImHd3V0iJzpUNIh8rNGDuzs,5241
+great_expectations_cloud/agent/actions/generate_data_quality_check_expectations_action.py,sha256=dhiy_lkcePDy45fEX0uhk-m89_aYKtV6P5jfd-Dcax8,23611
+great_expectations_cloud/agent/actions/list_asset_names.py,sha256=pOL5ip8ZZJbZhDNSp44rjYkx93rKdf3U6f4fY-JLhvg,2576
+great_expectations_cloud/agent/actions/run_checkpoint.py,sha256=xlDYX6BHvHr3loz74hu4ARq5VStJ7-QYf80GPi-PuTY,5697
+great_expectations_cloud/agent/actions/run_metric_list_action.py,sha256=69nyR0vXjz_lKAHYczuEMQtbNIv0lf-DMiOBXmkwpuQ,3237
+great_expectations_cloud/agent/actions/run_scheduled_checkpoint.py,sha256=33lp6F_J1asgBmaHoqLLDL8wRnBw9MEZlQ3bosGQkwk,3425
+great_expectations_cloud/agent/actions/run_window_checkpoint.py,sha256=MCMbgY3dNUx546sNpXg5p3rCWNzDa6EQOUt20Nr5udo,2317
 great_expectations_cloud/agent/actions/unknown.py,sha256=mtWw9tDZqGZSiUWj7PtIlLFJ1dM-7AHBX3SO16-u2EM,739
 great_expectations_cloud/agent/actions/utils.py,sha256=0lzeASN1TYWJ35-H-sCRkcPMHzWU05SzKIyTv0CzvA8,1665
-great_expectations_cloud/agent/agent.py,sha256=JXZTtExQ-gMM6KPiqSH0xa95ugQC4F-fxclzO4bS4Jc,30961
+great_expectations_cloud/agent/agent.py,sha256=JdyWvLdkzMq_kJgLsuhCDZz7NIOjBQZxaUv_UiwiEwg,40826
 great_expectations_cloud/agent/agent_warnings.py,sha256=9-xl_AI2V9Py4o7KzFOQjG3lYx-vZ36fq4w2iiPNiUw,362
 great_expectations_cloud/agent/cli.py,sha256=a_HmPxBMlVD59BEmkZnlbOOAFlezVMx9djZ2XIW-3W0,2885
 great_expectations_cloud/agent/config.py,sha256=c1BOr-TrxZnciWNRuu4atGtfRh-XSmwIS0osDCzQa04,1348
 great_expectations_cloud/agent/constants.py,sha256=SAEtcOwI-SxZiZSVoCFfEC5oCtYdgmyUmK7for_zg_Q,246
-great_expectations_cloud/agent/event_handler.py,sha256=kePpZ4EMIY0YWKeYH4W7lKY8C8OeER_Dkhwwadf3nTc,6069
+great_expectations_cloud/agent/event_handler.py,sha256=wcvAM0i6pnIsWlYstnZ2ThpByJYCyKrRmh0solC02bk,6091
 great_expectations_cloud/agent/exceptions.py,sha256=XIDBVSmBfFpVQA5i9rZqEtORIdi5E4_lwup0jP90TUk,1506
 great_expectations_cloud/agent/message_service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-great_expectations_cloud/agent/message_service/asyncio_rabbit_mq_client.py,sha256=B2EwgG_Qdm1s_xkxbGPQxdRBkDSuhkKyT6rGFpXuqQ0,12391
-great_expectations_cloud/agent/message_service/subscriber.py,sha256=K8szy9uM1MUBkMaMZt9o5ExnjqEn1hw6zYaH7pQkjoM,5975
-great_expectations_cloud/agent/models.py,sha256=ea3d6eul8ECvsIubVSEJzeJbX6X9M40oEhaWhj4yuVI,10007
+great_expectations_cloud/agent/message_service/asyncio_rabbit_mq_client.py,sha256=z1iYArWz5qhtsEYSK6Qd2LbiwZsEJOzOvunvtHVRuBo,13630
+great_expectations_cloud/agent/message_service/subscriber.py,sha256=ZEwwLaQe-yypYJWMkpTvRqEgzH7FbYH0-ean8LK1Qt8,6174
+great_expectations_cloud/agent/models.py,sha256=PKPxOXApET47s5Q-c9OqolgZoYghoULuvMFcCsl1soI,10478
 great_expectations_cloud/agent/run.py,sha256=V33RLoB1PFmJ0h0RfHG4SB5lN_Za8tW2Dua6GUpN9yY,639
 great_expectations_cloud/agent/utils.py,sha256=3OvdcXeK1gk2oJgqG4jPvBRwlMCn8LioULW3YgRtj98,2950
 great_expectations_cloud/logging/README.md,sha256=vbwU689x8SkGjzoBYQzZOzAvh28fR0RCa1XY5WD-Dgs,1762
 great_expectations_cloud/logging/logging_cfg.py,sha256=W6mlm4_Z2bjzM5TuKmFg_WZor2XoJm4DAoLGaf2O__I,6579
 great_expectations_cloud/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-great_expectations_cloud-20250915.1.dev1.dist-info/LICENSE,sha256=_JJnoX6N_OkrAwlCRizCwil0tIjDAy2TG3GiJ50sM6k,2084
-great_expectations_cloud-20250915.1.dev1.dist-info/METADATA,sha256=QSzFeoscVVOO7Hssw3wo7WEfwFZwU2c8L0CnGakvaQg,12357
-great_expectations_cloud-20250915.1.dev1.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-great_expectations_cloud-20250915.1.dev1.dist-info/entry_points.txt,sha256=ofJgdeS2gSzxXLyCAjfNhIaN1wmSyR7EAMs5qhVaXE4,68
-great_expectations_cloud-20250915.1.dev1.dist-info/RECORD,,
+great_expectations_cloud-20260120.0.dev0.dist-info/METADATA,sha256=UzLgYArcgE5nmCdO27EcqNQVSf7hClmv5Ol0xanWx1c,12331
+great_expectations_cloud-20260120.0.dev0.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+great_expectations_cloud-20260120.0.dev0.dist-info/entry_points.txt,sha256=ofJgdeS2gSzxXLyCAjfNhIaN1wmSyR7EAMs5qhVaXE4,68
+great_expectations_cloud-20260120.0.dev0.dist-info/licenses/LICENSE,sha256=_JJnoX6N_OkrAwlCRizCwil0tIjDAy2TG3GiJ50sM6k,2084
+great_expectations_cloud-20260120.0.dev0.dist-info/RECORD,,

great_expectations_cloud-20250915.1.dev1.dist-info/WHEEL → great_expectations_cloud-20260120.0.dev0.dist-info/WHEEL
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: poetry-core 2.1.3
+Generator: poetry-core 2.2.1
 Root-Is-Purelib: true
 Tag: py3-none-any