great-expectations-cloud 20240523.0.dev0__py3-none-any.whl → 20251124.0.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. great_expectations_cloud/agent/__init__.py +3 -0
  2. great_expectations_cloud/agent/actions/__init__.py +8 -5
  3. great_expectations_cloud/agent/actions/agent_action.py +21 -6
  4. great_expectations_cloud/agent/actions/draft_datasource_config_action.py +45 -24
  5. great_expectations_cloud/agent/actions/generate_data_quality_check_expectations_action.py +557 -0
  6. great_expectations_cloud/agent/actions/list_asset_names.py +65 -0
  7. great_expectations_cloud/agent/actions/run_checkpoint.py +74 -27
  8. great_expectations_cloud/agent/actions/run_metric_list_action.py +11 -5
  9. great_expectations_cloud/agent/actions/run_scheduled_checkpoint.py +67 -0
  10. great_expectations_cloud/agent/actions/run_window_checkpoint.py +66 -0
  11. great_expectations_cloud/agent/actions/utils.py +35 -0
  12. great_expectations_cloud/agent/agent.py +444 -101
  13. great_expectations_cloud/agent/cli.py +2 -2
  14. great_expectations_cloud/agent/config.py +19 -5
  15. great_expectations_cloud/agent/event_handler.py +49 -12
  16. great_expectations_cloud/agent/exceptions.py +9 -0
  17. great_expectations_cloud/agent/message_service/asyncio_rabbit_mq_client.py +80 -14
  18. great_expectations_cloud/agent/message_service/subscriber.py +8 -5
  19. great_expectations_cloud/agent/models.py +197 -20
  20. great_expectations_cloud/agent/utils.py +84 -0
  21. great_expectations_cloud/logging/logging_cfg.py +20 -4
  22. great_expectations_cloud/py.typed +0 -0
  23. {great_expectations_cloud-20240523.0.dev0.dist-info → great_expectations_cloud-20251124.0.dev1.dist-info}/METADATA +54 -46
  24. great_expectations_cloud-20251124.0.dev1.dist-info/RECORD +34 -0
  25. {great_expectations_cloud-20240523.0.dev0.dist-info → great_expectations_cloud-20251124.0.dev1.dist-info}/WHEEL +1 -1
  26. great_expectations_cloud/agent/actions/data_assistants/__init__.py +0 -8
  27. great_expectations_cloud/agent/actions/data_assistants/run_missingness_data_assistant.py +0 -45
  28. great_expectations_cloud/agent/actions/data_assistants/run_onboarding_data_assistant.py +0 -45
  29. great_expectations_cloud/agent/actions/data_assistants/utils.py +0 -123
  30. great_expectations_cloud/agent/actions/list_table_names.py +0 -76
  31. great_expectations_cloud-20240523.0.dev0.dist-info/RECORD +0 -32
  32. {great_expectations_cloud-20240523.0.dev0.dist-info → great_expectations_cloud-20251124.0.dev1.dist-info}/entry_points.txt +0 -0
  33. {great_expectations_cloud-20240523.0.dev0.dist-info → great_expectations_cloud-20251124.0.dev1.dist-info/licenses}/LICENSE +0 -0
@@ -1,31 +1,68 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import uuid
4
- from typing import Any, Dict, Literal, Optional, Sequence, Set, Union
4
+ from collections.abc import Sequence
5
+ from typing import TYPE_CHECKING, Annotated, Any, Literal, Optional, Union
5
6
  from uuid import UUID
6
7
 
7
- from great_expectations.compatibility.pydantic import BaseModel, Extra, Field
8
+ from great_expectations.expectations.metadata_types import DataQualityIssues
8
9
  from great_expectations.experimental.metric_repository.metrics import MetricTypes
9
- from typing_extensions import Annotated
10
+ from pydantic.v1 import BaseModel, Extra, Field
10
11
 
11
12
  from great_expectations_cloud.agent.exceptions import GXCoreError
12
13
 
13
14
 
14
- class AgentBaseModel(BaseModel): # type: ignore[misc] # BaseSettings is has Any type
15
+ def all_subclasses(cls: type) -> list[type]:
16
+ """
17
+ Recursively gather every subclass of `cls` (including nested ones).
18
+ """
19
+ direct = cls.__subclasses__()
20
+ all_sub_cls: list[type] = []
21
+ for C in direct:
22
+ all_sub_cls.append(C)
23
+ all_sub_cls.extend(all_subclasses(C))
24
+ return all_sub_cls
25
+
26
+
27
+ class DomainContext(BaseModel):
28
+ """
29
+ Encapsulates domain-related context information.
30
+
31
+ This dataclass consolidates organization_id and workspace_id to reduce
32
+ parameter proliferation and improve code maintainability.
33
+ """
34
+
35
+ organization_id: UUID
36
+ workspace_id: UUID
37
+
38
+
39
+ class AgentBaseExtraForbid(BaseModel):
15
40
  class Config:
16
41
  # 2024-03-04: ZEL-501 Strictly enforce models for handling outdated APIs
17
42
  extra: str = Extra.forbid
18
43
 
19
44
 
20
- class EventBase(AgentBaseModel):
45
+ class AgentBaseExtraIgnore(BaseModel):
46
+ class Config:
47
+ # Extra fields on Events are not strictly enforced
48
+ extra: str = Extra.ignore
49
+
50
+
51
+ class EventBase(AgentBaseExtraIgnore):
21
52
  type: str
53
+ organization_id: Optional[UUID] = None # noqa: UP045
54
+ workspace_id: UUID
55
+
56
+
57
+ class ScheduledEventBase(EventBase):
58
+ schedule_id: UUID
22
59
 
23
60
 
24
61
  class RunDataAssistantEvent(EventBase):
25
62
  type: str
26
63
  datasource_name: str
27
64
  data_asset_name: str
28
- expectation_suite_name: Optional[str] = None
65
+ expectation_suite_name: Optional[str] = None # noqa: UP045
29
66
 
30
67
 
31
68
  class RunOnboardingDataAssistantEvent(RunDataAssistantEvent):
@@ -42,9 +79,29 @@ class RunMissingnessDataAssistantEvent(RunDataAssistantEvent):
42
79
 
43
80
  class RunCheckpointEvent(EventBase):
44
81
  type: Literal["run_checkpoint_request"] = "run_checkpoint_request"
45
- datasource_names_to_asset_names: Dict[str, Set[str]]
82
+ datasource_names_to_asset_names: dict[str, set[str]]
83
+ checkpoint_id: uuid.UUID
84
+ splitter_options: Optional[dict[str, Any]] = None # noqa: UP045
85
+ # TODO: Remove optional once fully migrated to greatexpectations v1
86
+ checkpoint_name: Optional[str] = None # noqa: UP045
87
+
88
+
89
+ class RunScheduledCheckpointEvent(ScheduledEventBase):
90
+ type: Literal["run_scheduled_checkpoint.received"] = "run_scheduled_checkpoint.received"
91
+ datasource_names_to_asset_names: dict[str, set[str]]
92
+ checkpoint_id: uuid.UUID
93
+ splitter_options: Optional[dict[str, Any]] = None # noqa: UP045
94
+ # TODO: Remove optional once fully migrated to greatexpectations v1
95
+ checkpoint_name: Optional[str] = None # noqa: UP045
96
+
97
+
98
+ class RunWindowCheckpointEvent(EventBase):
99
+ type: Literal["run_window_checkpoint.received"] = "run_window_checkpoint.received"
100
+ datasource_names_to_asset_names: dict[str, set[str]]
46
101
  checkpoint_id: uuid.UUID
47
- splitter_options: Optional[Dict[str, Any]] = None
102
+ splitter_options: Optional[dict[str, Any]] = None # noqa: UP045
103
+ # TODO: Remove optional once fully migrated to greatexpectations v1
104
+ checkpoint_name: Optional[str] = None # noqa: UP045
48
105
 
49
106
 
50
107
  class RunColumnDescriptiveMetricsEvent(EventBase):
@@ -62,7 +119,7 @@ class RunMetricsListEvent(EventBase):
62
119
  metric_names: Sequence[MetricTypes]
63
120
 
64
121
 
65
- class ListTableNamesEvent(EventBase):
122
+ class ListAssetNamesEvent(EventBase):
66
123
  type: Literal["list_table_names_request.received"] = "list_table_names_request.received"
67
124
  datasource_name: str
68
125
 
@@ -72,46 +129,166 @@ class DraftDatasourceConfigEvent(EventBase):
72
129
  config_id: UUID
73
130
 
74
131
 
75
- class UnknownEvent(EventBase):
132
+ class GenerateDataQualityCheckExpectationsEvent(EventBase):
133
+ type: Literal["generate_data_quality_check_expectations_request.received"] = (
134
+ "generate_data_quality_check_expectations_request.received"
135
+ )
136
+ datasource_name: str
137
+ data_assets: Sequence[str]
138
+ selected_data_quality_issues: Sequence[DataQualityIssues] | None = None
139
+ use_forecast: bool = False # feature flag
140
+ created_via: str | None = None
141
+
142
+
143
+ class RunRdAgentEvent(EventBase):
144
+ type: Literal["rd_agent_action.received"] = "rd_agent_action.received"
145
+ datasource_name: str
146
+ data_asset_name: str
147
+ batch_definition_name: str
148
+ batch_parameters: Optional[dict[str, Any]] = None # noqa: UP045
149
+ use_core_metrics: bool = False
150
+
151
+
152
+ class UnknownEvent(AgentBaseExtraForbid):
76
153
  type: Literal["unknown_event"] = "unknown_event"
77
154
 
78
155
 
79
- Event = Annotated[
80
- Union[
156
+ class MissingEventSubclasses(RuntimeError):
157
+ def __init__(self) -> None:
158
+ super().__init__("No valid Event subclasses found")
159
+
160
+
161
+ # Type alias for any event class that can be used in the dynamic system
162
+ EventType = type[Union[AgentBaseExtraForbid, AgentBaseExtraIgnore]]
163
+
164
+
165
+ #
166
+ # Dynamically build Event union from all subclasses of AgentBaseExtraForbid and AgentBaseExtraIgnore
167
+ #
168
+ def _build_event_union() -> tuple[type, ...]:
169
+ """Build a discriminated Union of all Event subclasses dynamically."""
170
+ # Collect all subclasses from both base classes
171
+ forbid_subs = all_subclasses(AgentBaseExtraForbid)
172
+ ignore_subs = all_subclasses(AgentBaseExtraIgnore)
173
+
174
+ # Combine and filter to only include classes with a 'type' field and a discriminator value
175
+ all_event_classes = []
176
+ for cls in forbid_subs + ignore_subs:
177
+ # Check if the class has a 'type' field and it's properly defined with a Literal type
178
+ if hasattr(cls, "__fields__") and "type" in cls.__fields__:
179
+ type_field = cls.__fields__["type"]
180
+ # Check if it has a default value (discriminator value)
181
+ if hasattr(type_field, "default") and type_field.default is not None:
182
+ all_event_classes.append(cls)
183
+
184
+ if not all_event_classes:
185
+ raise MissingEventSubclasses()
186
+
187
+ # Remove duplicates (preserves order for deterministic results)
188
+ return tuple(dict.fromkeys(all_event_classes))
189
+
190
+
191
+ # Build the dynamic Event union
192
+ _event_classes = _build_event_union()
193
+
194
+ if TYPE_CHECKING:
195
+ # For static type checking, provide a concrete union of known event types
196
+ Event = Union[
81
197
  RunOnboardingDataAssistantEvent,
82
198
  RunMissingnessDataAssistantEvent,
83
199
  RunCheckpointEvent,
200
+ RunScheduledCheckpointEvent,
201
+ RunWindowCheckpointEvent,
84
202
  RunColumnDescriptiveMetricsEvent,
85
203
  RunMetricsListEvent,
86
204
  DraftDatasourceConfigEvent,
87
- ListTableNamesEvent,
205
+ ListAssetNamesEvent,
206
+ GenerateDataQualityCheckExpectationsEvent,
207
+ RunRdAgentEvent,
88
208
  UnknownEvent,
89
- ],
90
- Field(discriminator="type"),
91
- ]
209
+ ]
210
+ else:
211
+ # At runtime, use the dynamic union
212
+ Event = Annotated[Union[_event_classes], Field(discriminator="type")]
213
+
214
+
215
+ def reload_event_union() -> None:
216
+ """Rebuild the Event union dynamically.
217
+
218
+ Call this method after subclassing one of the EventBase models in order
219
+ to make it available in the Event union."""
220
+ global Event # noqa: PLW0603
221
+ reloaded_event_classes = _build_event_union()
222
+ Event = Annotated[Union[reloaded_event_classes], Field(discriminator="type")] # type: ignore[valid-type]
223
+
92
224
 
225
+ def get_event_union() -> Any:
226
+ """Canonical way to access the Event union for non-typing use cases.
93
227
 
94
- class CreatedResource(AgentBaseModel):
228
+ The Event union can be dynamically extended when the Agent codebase is extended.
229
+ Those subclasses will be defined after the Event model is defined in this module.
230
+ This function allows callers to access the full union that includes those subclasses.
231
+
232
+ Returns:
233
+ A dynamically constructed discriminated Union type suitable for pydantic parsing.
234
+
235
+ Type Bounds:
236
+ All members of the returned Union are subclasses of either:
237
+ - AgentBaseExtraForbid: Event classes with strict field validation
238
+ - AgentBaseExtraIgnore: Event classes that ignore extra fields
239
+
240
+ The actual return type is equivalent to:
241
+ Annotated[Union[<all_event_subclasses>], Field(discriminator="type")]
242
+
243
+ This cannot be statically typed due to the dynamic discovery of subclasses,
244
+ so we use Any. At runtime, this represents a properly typed
245
+ discriminated union of concrete event model classes.
246
+ """
247
+ reload_event_union()
248
+ return Event
249
+
250
+
251
+ class CreatedResource(AgentBaseExtraForbid):
95
252
  resource_id: str
96
253
  type: str
97
254
 
98
255
 
99
- class JobStarted(AgentBaseModel):
256
+ class JobStarted(AgentBaseExtraForbid):
100
257
  status: Literal["started"] = "started"
101
258
 
102
259
 
103
- class JobCompleted(AgentBaseModel):
260
+ class JobCompleted(AgentBaseExtraForbid):
104
261
  status: Literal["completed"] = "completed"
105
262
  success: bool
106
263
  created_resources: Sequence[CreatedResource] = []
107
264
  error_stack_trace: Union[str, None] = None
108
265
  error_code: Union[str, None] = None
109
- error_params: Union[Dict[str, str], None] = None
266
+ error_params: Union[dict[str, str], None] = None
267
+ processed_by: Union[Literal["agent", "runner"], None] = None
110
268
 
111
269
 
112
270
  JobStatus = Union[JobStarted, JobCompleted]
113
271
 
114
272
 
273
+ class UpdateJobStatusRequest(AgentBaseExtraForbid):
274
+ data: JobStatus
275
+
276
+
277
+ class CreateScheduledJobAndSetJobStarted(AgentBaseExtraForbid):
278
+ type: Literal["run_scheduled_checkpoint.received"] = "run_scheduled_checkpoint.received"
279
+ correlation_id: UUID
280
+ schedule_id: UUID
281
+ checkpoint_id: UUID
282
+ datasource_names_to_asset_names: dict[str, set[str]]
283
+ splitter_options: Optional[dict[str, Any]] = None # noqa: UP045
284
+ # TODO: Remove optional once fully migrated to greatexpectations v1
285
+ checkpoint_name: Optional[str] = None # noqa: UP045
286
+
287
+
288
+ class CreateScheduledJobAndSetJobStartedRequest(AgentBaseExtraForbid):
289
+ data: CreateScheduledJobAndSetJobStarted
290
+
291
+
115
292
  def build_failed_job_completed_status(error: BaseException) -> JobCompleted:
116
293
  if isinstance(error, GXCoreError):
117
294
  status = JobCompleted(
@@ -0,0 +1,84 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+
5
+ INPUT_RANGE = (0.0, 1.0)
6
+ OUTPUT_RANGE = (0.0, 10.0)
7
+ ROUND_PRECISION = 1
8
+
9
+
10
+ @dataclass
11
+ class TriangularInterpolationOptions:
12
+ """Options for triangular interpolation."""
13
+
14
+ input_range: tuple[float, float] = INPUT_RANGE
15
+ output_range: tuple[float, float] = OUTPUT_RANGE
16
+ round_precision: int = ROUND_PRECISION
17
+
18
+
19
+ def triangular_interpolation(
20
+ value: float, options: TriangularInterpolationOptions | None = None
21
+ ) -> float:
22
+ """
23
+ Maps a value between input range to a triangular pattern between output range.
24
+ Values between input_min-midpoint map from output_min to output_max linearly.
25
+ Values between midpoint-input_max map from output_max to output_min linearly.
26
+ The midpoint is automatically calculated as the middle of the input range.
27
+ Result is always rounded to the specified precision.
28
+
29
+ Args:
30
+ value (float): Input value between input_min and input_max
31
+ options (TriangularInterpolationOptions, optional): Configuration options for the interpolation.
32
+ If not provided, default values will be used.
33
+
34
+ Returns:
35
+ float: Interpolated value between output_min and output_max, rounded to specified precision
36
+ """
37
+ if options is None:
38
+ options = TriangularInterpolationOptions()
39
+
40
+ # Extract values from options for readability
41
+ input_min, input_max = options.input_range
42
+ output_min, output_max = options.output_range
43
+ round_precision = options.round_precision
44
+
45
+ # Calculate midpoint
46
+ midpoint = input_min + (input_max - input_min) / 2
47
+
48
+ # Ensure input is between input_min and input_max
49
+ clamped_value = max(input_min, min(input_max, value))
50
+
51
+ if clamped_value <= midpoint:
52
+ # First half: linear mapping from input_min→output_min to midpoint→output_max
53
+ normalized_value = (clamped_value - input_min) / (midpoint - input_min)
54
+ result = output_min + normalized_value * (output_max - output_min)
55
+ else:
56
+ # Second half: linear mapping from midpoint→output_max to input_max→output_min
57
+ normalized_value = (clamped_value - midpoint) / (input_max - midpoint)
58
+ result = output_max - normalized_value * (output_max - output_min)
59
+
60
+ # Round to specified precision
61
+ result = round(result, round_precision)
62
+
63
+ return result
64
+
65
+
66
+ def param_safe_unique_id(length: int) -> str:
67
+ """
68
+ Generate a random string of alphabetic characters.
69
+
70
+ Args:
71
+ length (int): The length of the string to generate
72
+
73
+ Returns:
74
+ str: A random string of alphabetic characters
75
+ """
76
+ import random # noqa: PLC0415
77
+
78
+ result = ""
79
+ characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
80
+ counter = 0
81
+ while counter < length:
82
+ result += characters[random.randint(0, len(characters) - 1)] # noqa: S311 # doesn't need to be cryptographically secure
83
+ counter += 1
84
+ return result
@@ -99,6 +99,8 @@ def configure_logger(log_settings: LogSettings) -> None:
99
99
  root.handlers[0].setFormatter(fmt)
100
100
 
101
101
  root.setLevel(log_settings.log_level.numeric_level)
102
+ # 2024-08-12: Reduce noise of pika reconnects
103
+ logging.getLogger("pika").setLevel(logging.WARNING)
102
104
 
103
105
  # TODO Define file loggers as dictConfig as well
104
106
  if not log_settings.skip_log_file:
@@ -172,6 +174,18 @@ class JSONFormatter(logging.Formatter):
172
174
 
173
175
  @override
174
176
  def format(self, record: logging.LogRecord) -> str:
177
+ """
178
+ TODO Support fstrings substitution containing '%s' syntax
179
+
180
+ Example from snowflake-connector-python:
181
+ logger.error(
182
+ "Snowflake Connector for Python Version: %s, "
183
+ "Python Version: %s, Platform: %s",
184
+ SNOWFLAKE_CONNECTOR_VERSION,
185
+ PYTHON_VERSION,
186
+ PLATFORM,
187
+ )
188
+ """
175
189
  log_full = record.__dict__
176
190
 
177
191
  log_full["event"] = record.msg
@@ -182,9 +196,6 @@ class JSONFormatter(logging.Formatter):
182
196
  if record.exc_info:
183
197
  log_full["exc_info"] = str(record.exc_info)
184
198
 
185
- if record.args:
186
- log_full["args"] = str(record.args)
187
-
188
199
  log_subset = {
189
200
  key: value
190
201
  for key, value in log_full.items()
@@ -196,4 +207,9 @@ class JSONFormatter(logging.Formatter):
196
207
  **self.custom_tags,
197
208
  }
198
209
 
199
- return json.dumps(complete_dict)
210
+ try:
211
+ return json.dumps(complete_dict)
212
+ except TypeError:
213
+ # Use repr() to avoid infinite recursion due to throwing another error
214
+ complete_dict = {key: repr(value) for key, value in complete_dict.items()}
215
+ return json.dumps(complete_dict)
File without changes
@@ -1,37 +1,35 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.4
2
2
  Name: great_expectations_cloud
3
- Version: 20240523.0.dev0
3
+ Version: 20251124.0.dev1
4
4
  Summary: Great Expectations Cloud
5
- Home-page: https://greatexpectations.io
6
5
  License: Proprietary
6
+ License-File: LICENSE
7
7
  Author: The Great Expectations Team
8
8
  Author-email: team@greatexpectations.io
9
- Requires-Python: >=3.8,<3.12
9
+ Requires-Python: >=3.11,<3.12
10
10
  Classifier: Development Status :: 3 - Alpha
11
11
  Classifier: Intended Audience :: Developers
12
12
  Classifier: Intended Audience :: Science/Research
13
13
  Classifier: License :: Other/Proprietary License
14
14
  Classifier: Programming Language :: Python :: 3
15
- Classifier: Programming Language :: Python :: 3.8
16
- Classifier: Programming Language :: Python :: 3.9
17
- Classifier: Programming Language :: Python :: 3.10
18
15
  Classifier: Programming Language :: Python :: 3.11
19
16
  Classifier: Topic :: Scientific/Engineering
20
17
  Classifier: Topic :: Scientific/Engineering :: Information Analysis
21
18
  Classifier: Topic :: Software Development :: Quality Assurance
22
19
  Classifier: Topic :: Software Development :: Testing
23
- Provides-Extra: postgres
24
- Provides-Extra: snowflake
25
- Requires-Dist: great-expectations (>=0.18.13,<0.19.0)
20
+ Requires-Dist: great-expectations[databricks,gx-redshift,mssql,postgresql,snowflake,trino] (==1.9.1)
26
21
  Requires-Dist: orjson (>=3.9.7,<4.0.0,!=3.9.10)
27
- Requires-Dist: packaging (>=21.3,<25.0)
22
+ Requires-Dist: packaging (>=21.3,<26.0)
28
23
  Requires-Dist: pika (>=1.3.1,<2.0.0)
29
- Requires-Dist: pydantic (<3)
30
- Requires-Dist: tenacity (>=8.2.3,<9.0.0)
24
+ Requires-Dist: pydantic (>=2.8.1,<3)
25
+ Requires-Dist: setuptools (==80.9.0)
26
+ Requires-Dist: sqlalchemy (>=2.0)
27
+ Requires-Dist: tenacity (>=8.2.3,<10.0.0)
28
+ Project-URL: Homepage, https://greatexpectations.io
31
29
  Project-URL: Repository, https://github.com/great-expectations/cloud
32
30
  Description-Content-Type: text/markdown
33
31
 
34
- # cloud
32
+ # GX cloud
35
33
 
36
34
  [![PyPI](https://img.shields.io/pypi/v/great_expectations_cloud)](https://pypi.org/project/great-expectations_cloud/#history)
37
35
  [![Docker Pulls](https://img.shields.io/docker/pulls/greatexpectations/agent)](https://hub.docker.com/r/greatexpectations/agent)
@@ -42,19 +40,30 @@ Description-Content-Type: text/markdown
42
40
 
43
41
  ## Quick Start
44
42
 
45
- ### Python
43
+ To use the GX Agent, you will need to have a Great Expectations Cloud account. You can sign up for free at [https://app.greatexpectations.io](https://app.greatexpectations.io).
46
44
 
47
- #### Install
45
+ Deployment instructions for the GX Agent can be found in the [GX Cloud documentation](https://docs.greatexpectations.io/docs/cloud/deploy/deploy_gx_agent).
48
46
 
49
- ```console
50
- pip install great_expectations_cloud
51
- ```
47
+ ## Dev Setup
52
48
 
53
- ##### Optional Dependencies
49
+ The following instructions are for those who are contributing to the GX Agent, to deploy and use the GX agent please see the Quick Start section above.
54
50
 
55
- ```console
56
- pip install 'great_expectations_cloud[sql]'
57
- ```
51
+ See also [CONTRIBUTING.md](https://github.com/great-expectations/cloud/blob/main/CONTRIBUTING.md)
52
+
53
+ 1. [Install or upgrade `poetry` to the latest version](https://python-poetry.org/docs/#installing-with-pipx)
54
+ - `pipx install poetry` or `pipx upgrade poetry`
55
+ 2. Set up virtual environment and install dependencies
56
+ - `poetry sync`
57
+ 3. Activate your virtual environment
58
+ - `eval $(poetry env activate)`
59
+ 4. Set up precommit hooks
60
+ - `pre-commit install`
61
+
62
+ ### Troubleshooting
63
+
64
+ If you run into issues, you can try `pipx reinstall-all`
65
+
66
+ ### Running locally for development
58
67
 
59
68
  ```console
60
69
  $ gx-agent --help
@@ -71,45 +80,33 @@ optional arguments:
71
80
  --version Show the GX Agent version.
72
81
  ```
73
82
 
74
- #### Set env variables
83
+ #### Set ENV variables
75
84
 
76
85
  `GX_CLOUD_ACCESS_TOKEN`
77
86
  `GX_CLOUD_ORGANIZATION_ID`
78
87
 
79
- ### Start the Agent
88
+ If you want to override where the GX Agent looks for the RabbitMQ queue you can also set
89
+ `AMQP_HOST_OVERRIDE` and `AMQP_PORT_OVERRIDE`. For example, if you are running a local dockerized RabbitMQ
90
+ service exposed on localhost port 5672, you can set `AMQP_HOST_OVERRIDE=127.0.0.1` and
91
+ `AMQP_PORT_OVERRIDE=5672`.
92
+
93
+ ### Start the GX Agent
80
94
 
81
- If you intend to run the Agent against local services (Cloud backend or datasources) run the Agent outside of the container.
95
+ If you intend to run the GX Agent against local services (Cloud backend or datasources) run the Agent outside of the container.
82
96
 
83
97
  ```
84
98
  gx-agent
85
99
  ```
86
100
 
87
- ### Docker
88
-
89
- [Building and running the Agent with Docker](#building-and-running-the-gx-agent-image)
90
-
91
- ## Dev Setup
92
-
93
- See also [CONTRIBUTING.md](https://github.com/great-expectations/cloud/blob/main/CONTRIBUTING.md)
94
-
95
- 1. [Install `poetry`](https://python-poetry.org/docs/#installation)
96
- - [`pipx install poetry`](https://python-poetry.org/docs/#installing-with-pipx)
97
- 2. Set up virtual environment and install dependencies
98
- - `poetry install --sync`
99
- 3. Activate your virtual environment
100
- - `poetry shell`
101
- 4. Set up precommit hooks
102
- - `pre-commit install`
103
-
104
101
  ### Developer Tasks
105
102
 
106
- Common developer tasks are available via `invoke` (defined in `tasks.py`)
103
+ Common developer tasks are available via `invoke` (defined in `tasks.py`).
107
104
 
108
105
  `invoke --list` to see available tasks.
109
106
 
110
107
  #### Synchronize Dependencies
111
108
 
112
- To ensure you are using the latest version of the core and development dependencies run `poetry install --sync`.
109
+ To ensure you are using the latest version of the core and development dependencies run `poetry sync`.
113
110
  Also available as an invoke task.
114
111
 
115
112
  ```console
@@ -118,6 +115,11 @@ invoke deps
118
115
 
119
116
  #### Updating `poetry.lock` dependencies
120
117
 
118
+ Use the latest version of poetry
119
+ ```console
120
+ pipx upgrade poetry
121
+ ```
122
+
121
123
  The dependencies installed in our CI and the Docker build step are determined by the [poetry.lock file](https://python-poetry.org/docs/basic-usage/#installing-with-poetrylock).
122
124
 
123
125
  [To update only a specific dependency](https://python-poetry.org/docs/cli/#update) (such as `great_expectations`) ...
@@ -126,6 +128,8 @@ The dependencies installed in our CI and the Docker build step are determined by
126
128
  poetry update great_expectations
127
129
  ```
128
130
 
131
+ **Note:** If `poetry update` does not find the latest version of `great_expectations`, you can manually update the version in `pyproject.toml`, and then update the lockfile using `poetry lock`.
132
+
129
133
  [To resolve and update all dependencies ...](https://python-poetry.org/docs/cli/#lock)
130
134
 
131
135
  ```console
@@ -249,13 +253,15 @@ We use the GitHub Actions workflow to automate the release and pre-release proce
249
253
  A visual representation of the workflow is shown [here](https://github.com/great-expectations/cloud/blob/main/.github/workflows/agent_release_workflows.png)
250
254
 
251
255
  ### Dependabot and Releases/Pre-releases
256
+
252
257
  GitHub's Dependabot regularly checks our dependencies for vulnerabilty-based updates and proposes PRs to update dependency version numbers accordingly.
253
258
 
254
259
  Dependabot may only update the `poetry.lock` file. If only changes to `poetry.lock` are made, this may be done in a pre-release.
255
260
 
256
261
  For changes to the `pyproject.toml` file:
262
+
257
263
  - If the version of a tool in the `[tool.poetry.group.dev.dependencies]` group is updated, this may be done without any version bump.
258
- - While doing this, make sure any version references in the pre-commit config `.pre-commit-config.yaml` are kept in sync (e.g., ruff).
264
+ - While doing this, make sure any version references in the pre-commit config `.pre-commit-config.yaml` are kept in sync (e.g., ruff).
259
265
  - For other dependency updates or package build metadata changes, a new release should be orchestrated. This includes updates in the following sections:
260
266
  - `[tool.poetry.dependencies]`
261
267
  - `[tool.poetry.group.*.dependencies]` where `*` is the name of the group (not including the `dev` group)
@@ -263,3 +269,5 @@ For changes to the `pyproject.toml` file:
263
269
  - Only modifying dev dependencies.
264
270
  - Only modifying tests that do not change functionality.
265
271
 
272
+ NOTE: Dependabot does not have permissions to access secrets in our CI. You may notice that integration tests fail on PRs that dependabot creates. If you add a commit (as a GX member) to the PR, the tests will run again and pass because they now have access to the secrets. That commit can be anything, including an empty commit e.g. `git commit -m "some message" --allow-empty`.
273
+
@@ -0,0 +1,34 @@
1
+ great_expectations_cloud/__init__.py,sha256=1mr5RDyA2N38eynvEfVbuYIbjFadeJfqZ-X9CrqYiVo,150
2
+ great_expectations_cloud/agent/__init__.py,sha256=FqDFYbGefmNhwlvJwJbNovkwzny6mwaYH5LtTi6VlSU,464
3
+ great_expectations_cloud/agent/actions/__init__.py,sha256=TYPe2j8EgaziXXgSLEdgjnbHKL56O6cQL1kjPnGbRFI,949
4
+ great_expectations_cloud/agent/actions/agent_action.py,sha256=F9zOgVmNJ_V2RhRbDXNMjd46-QVNFN8Lp_bmPvh5cwU,1189
5
+ great_expectations_cloud/agent/actions/draft_datasource_config_action.py,sha256=NVN2GBSty-XvCW7pHDkLImHd3V0iJzpUNIh8rNGDuzs,5241
6
+ great_expectations_cloud/agent/actions/generate_data_quality_check_expectations_action.py,sha256=dhiy_lkcePDy45fEX0uhk-m89_aYKtV6P5jfd-Dcax8,23611
7
+ great_expectations_cloud/agent/actions/list_asset_names.py,sha256=pOL5ip8ZZJbZhDNSp44rjYkx93rKdf3U6f4fY-JLhvg,2576
8
+ great_expectations_cloud/agent/actions/run_checkpoint.py,sha256=N2d07JDCG06kMve7yjPZQFlaGKoJw5dCbpKuBWw6Ssg,3751
9
+ great_expectations_cloud/agent/actions/run_metric_list_action.py,sha256=69nyR0vXjz_lKAHYczuEMQtbNIv0lf-DMiOBXmkwpuQ,3237
10
+ great_expectations_cloud/agent/actions/run_scheduled_checkpoint.py,sha256=_aYXRsqmiuUOxX1aLppBbJCyBvo0gxZJ1fS6-W-uV1s,2457
11
+ great_expectations_cloud/agent/actions/run_window_checkpoint.py,sha256=MCMbgY3dNUx546sNpXg5p3rCWNzDa6EQOUt20Nr5udo,2317
12
+ great_expectations_cloud/agent/actions/unknown.py,sha256=mtWw9tDZqGZSiUWj7PtIlLFJ1dM-7AHBX3SO16-u2EM,739
13
+ great_expectations_cloud/agent/actions/utils.py,sha256=0lzeASN1TYWJ35-H-sCRkcPMHzWU05SzKIyTv0CzvA8,1665
14
+ great_expectations_cloud/agent/agent.py,sha256=GtN-9t5yUZ7JBfKhZZaEqiDtwP8bRp-9XiiYgwVG_Do,31037
15
+ great_expectations_cloud/agent/agent_warnings.py,sha256=9-xl_AI2V9Py4o7KzFOQjG3lYx-vZ36fq4w2iiPNiUw,362
16
+ great_expectations_cloud/agent/cli.py,sha256=a_HmPxBMlVD59BEmkZnlbOOAFlezVMx9djZ2XIW-3W0,2885
17
+ great_expectations_cloud/agent/config.py,sha256=c1BOr-TrxZnciWNRuu4atGtfRh-XSmwIS0osDCzQa04,1348
18
+ great_expectations_cloud/agent/constants.py,sha256=SAEtcOwI-SxZiZSVoCFfEC5oCtYdgmyUmK7for_zg_Q,246
19
+ great_expectations_cloud/agent/event_handler.py,sha256=wcvAM0i6pnIsWlYstnZ2ThpByJYCyKrRmh0solC02bk,6091
20
+ great_expectations_cloud/agent/exceptions.py,sha256=XIDBVSmBfFpVQA5i9rZqEtORIdi5E4_lwup0jP90TUk,1506
21
+ great_expectations_cloud/agent/message_service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
22
+ great_expectations_cloud/agent/message_service/asyncio_rabbit_mq_client.py,sha256=B2EwgG_Qdm1s_xkxbGPQxdRBkDSuhkKyT6rGFpXuqQ0,12391
23
+ great_expectations_cloud/agent/message_service/subscriber.py,sha256=K8szy9uM1MUBkMaMZt9o5ExnjqEn1hw6zYaH7pQkjoM,5975
24
+ great_expectations_cloud/agent/models.py,sha256=jgCDUJRDdCrXoU3NzfaJXmL8Y9gUqR3cO14nMsXFUDA,10298
25
+ great_expectations_cloud/agent/run.py,sha256=V33RLoB1PFmJ0h0RfHG4SB5lN_Za8tW2Dua6GUpN9yY,639
26
+ great_expectations_cloud/agent/utils.py,sha256=3OvdcXeK1gk2oJgqG4jPvBRwlMCn8LioULW3YgRtj98,2950
27
+ great_expectations_cloud/logging/README.md,sha256=vbwU689x8SkGjzoBYQzZOzAvh28fR0RCa1XY5WD-Dgs,1762
28
+ great_expectations_cloud/logging/logging_cfg.py,sha256=W6mlm4_Z2bjzM5TuKmFg_WZor2XoJm4DAoLGaf2O__I,6579
29
+ great_expectations_cloud/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
30
+ great_expectations_cloud-20251124.0.dev1.dist-info/METADATA,sha256=_giplgtZY20l8Fa0ktYuMWgvf_0s5COpQMBTWB1I6WY,12379
31
+ great_expectations_cloud-20251124.0.dev1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
32
+ great_expectations_cloud-20251124.0.dev1.dist-info/entry_points.txt,sha256=ofJgdeS2gSzxXLyCAjfNhIaN1wmSyR7EAMs5qhVaXE4,68
33
+ great_expectations_cloud-20251124.0.dev1.dist-info/licenses/LICENSE,sha256=_JJnoX6N_OkrAwlCRizCwil0tIjDAy2TG3GiJ50sM6k,2084
34
+ great_expectations_cloud-20251124.0.dev1.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: poetry-core 1.8.1
2
+ Generator: poetry-core 2.2.1
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
@@ -1,8 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from great_expectations_cloud.agent.actions.data_assistants.run_missingness_data_assistant import (
4
- RunMissingnessDataAssistantAction,
5
- )
6
- from great_expectations_cloud.agent.actions.data_assistants.run_onboarding_data_assistant import (
7
- RunOnboardingDataAssistantAction,
8
- )