great-expectations-cloud 20250717.0.dev0__tar.gz → 20251211.0.dev1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/PKG-INFO +7 -5
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/README.md +2 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/agent_action.py +3 -3
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/draft_datasource_config_action.py +2 -2
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/generate_data_quality_check_expectations_action.py +47 -15
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/list_asset_names.py +4 -5
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/run_metric_list_action.py +3 -3
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/run_scheduled_checkpoint.py +3 -4
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/run_window_checkpoint.py +2 -4
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/utils.py +13 -4
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/agent.py +63 -35
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/event_handler.py +8 -7
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/models.py +13 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/pyproject.toml +7 -8
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/LICENSE +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/__init__.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/__init__.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/__init__.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/run_checkpoint.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/unknown.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/agent_warnings.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/cli.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/config.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/constants.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/exceptions.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/message_service/__init__.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/message_service/asyncio_rabbit_mq_client.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/message_service/subscriber.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/run.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/utils.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/logging/README.md +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/logging/logging_cfg.py +0 -0
- {great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/py.typed +0 -0
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/PKG-INFO
RENAMED
@@ -1,22 +1,22 @@
-Metadata-Version: 2.…
+Metadata-Version: 2.4
 Name: great_expectations_cloud
-Version: 20250717.0.dev0
+Version: 20251211.0.dev1
 Summary: Great Expectations Cloud
 License: Proprietary
+License-File: LICENSE
 Author: The Great Expectations Team
 Author-email: team@greatexpectations.io
-Requires-Python: >=3.11,<3.12
+Requires-Python: >=3.11.4,<3.12
 Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: Science/Research
 Classifier: License :: Other/Proprietary License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: Scientific/Engineering
 Classifier: Topic :: Scientific/Engineering :: Information Analysis
 Classifier: Topic :: Software Development :: Quality Assurance
 Classifier: Topic :: Software Development :: Testing
-Requires-Dist: great-expectations[databricks,gx-redshift,postgresql,snowflake] (==1.…
+Requires-Dist: great-expectations[databricks,gx-redshift,mssql,postgresql,snowflake,trino] (==1.9.3)
 Requires-Dist: orjson (>=3.9.7,<4.0.0,!=3.9.10)
 Requires-Dist: packaging (>=21.3,<26.0)
 Requires-Dist: pika (>=1.3.1,<2.0.0)
@@ -127,6 +127,8 @@ The dependencies installed in our CI and the Docker build step are determined by
 poetry update great_expectations
 ```
 
+**Note:** If `poetry update` does not find the latest version of `great_expectations`, you can manually update the version in `pyproject.toml`, and then update the lockfile using `poetry lock`.
+
 [To resolve and update all dependencies ...](https://python-poetry.org/docs/cli/#lock)
 
 ```console
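The tightened `Requires-Python` pin (`>=3.11` → `>=3.11.4`) can be checked mechanically with `packaging`, which is already one of the declared runtime dependencies above. A minimal sketch; the candidate version strings are illustrative:

```python
# Check interpreter versions against the old and new Requires-Python pins.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

old_pin = SpecifierSet(">=3.11,<3.12")
new_pin = SpecifierSet(">=3.11.4,<3.12")

for candidate in ("3.11.0", "3.11.4", "3.11.9", "3.12.0"):
    v = Version(candidate)
    print(candidate, "old:", v in old_pin, "new:", v in new_pin)
# 3.11.0 satisfies only the old pin; 3.12.0 satisfies neither.
```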
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/README.md
RENAMED
@@ -97,6 +97,8 @@ The dependencies installed in our CI and the Docker build step are determined by
 poetry update great_expectations
 ```
 
+**Note:** If `poetry update` does not find the latest version of `great_expectations`, you can manually update the version in `pyproject.toml`, and then update the lockfile using `poetry lock`.
+
 [To resolve and update all dependencies ...](https://python-poetry.org/docs/cli/#lock)
 
 ```console
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/agent_action.py
RENAMED
@@ -4,7 +4,6 @@ import datetime
 from abc import abstractmethod
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Generic, Optional, TypeVar, Union
-from uuid import UUID
 
 from pydantic.v1 import BaseModel
 
@@ -12,6 +11,7 @@ from great_expectations_cloud.agent.models import (
     AgentBaseExtraForbid,
     AgentBaseExtraIgnore,
     CreatedResource,
+    DomainContext,
 )
 
 if TYPE_CHECKING:
@@ -32,11 +32,11 @@ _EventT = TypeVar("_EventT", bound=Union[AgentBaseExtraForbid, AgentBaseExtraIgnore])
 
 class AgentAction(Generic[_EventT]):
     def __init__(
-        self, context: CloudDataContext, base_url: str, organization_id: UUID, auth_key: str
+        self, context: CloudDataContext, base_url: str, domain_context: DomainContext, auth_key: str
     ):
         self._context = context
         self._base_url = base_url
-        self._organization_id = organization_id
+        self._domain_context = domain_context
         self._auth_key = auth_key
 
     @abstractmethod
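The `AgentAction` change above is the pivot for the rest of this release: every action now receives a `DomainContext` instead of a bare organization ID and, as the following files show, builds workspace-scoped URLs from it. A simplified sketch of the URL shape before and after, using a stand-in dataclass and a hypothetical base URL rather than the real pydantic model and config:

```python
# Simplified sketch of the workspace-scoped URL change, using stand-in types.
from dataclasses import dataclass
from urllib.parse import urljoin
from uuid import UUID

@dataclass
class DomainContext:  # stand-in for the pydantic model added in models.py below
    organization_id: UUID
    workspace_id: UUID

base_url = "https://api.greatexpectations.io"  # hypothetical base URL
ctx = DomainContext(UUID(int=1), UUID(int=2))
config_id = UUID(int=3)

# Old shape: organization-scoped only.
old = urljoin(base_url, f"/api/v1/organizations/{ctx.organization_id}/draft-datasources/{config_id}")
# New shape: a /workspaces/{workspace_id} segment is inserted after the organization.
new = urljoin(
    base_url,
    f"/api/v1/organizations/{ctx.organization_id}"
    f"/workspaces/{ctx.workspace_id}/draft-datasources/{config_id}",
)
print(old)
print(new)
```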
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/draft_datasource_config_action.py
RENAMED
@@ -70,7 +70,7 @@ class DraftDatasourceConfigAction(AgentAction[DraftDatasourceConfigEvent]):
         with create_session(access_token=self._auth_key) as session:
             url = urljoin(
                 base=self._base_url,
-                url=f"/api/v1/organizations/{self._organization_id}/draft-table-names/{config_id}",
+                url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/draft-table-names/{config_id}",
             )
             response = session.put(
                 url=url,
@@ -87,7 +87,7 @@ class DraftDatasourceConfigAction(AgentAction[DraftDatasourceConfigEvent]):
     def get_draft_config(self, config_id: UUID) -> dict[str, Any]:
         resource_url = urljoin(
             base=self._base_url,
-            url=f"/api/v1/organizations/{self._organization_id}/draft-datasources/{config_id}",
+            url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/draft-datasources/{config_id}",
         )
         with create_session(access_token=self._auth_key) as session:
             response = session.get(resource_url)
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/generate_data_quality_check_expectations_action.py
RENAMED
@@ -10,9 +10,13 @@ from uuid import UUID
 
 import great_expectations.expectations as gx_expectations
 from great_expectations.core.http import create_session
-from great_expectations.exceptions import …
+from great_expectations.exceptions import (
+    GXCloudError,
+    InvalidExpectationConfigurationError,
+)
 from great_expectations.expectations.metadata_types import (
     DataQualityIssues,
+    FailureSeverity,
 )
 from great_expectations.expectations.window import Offset, Window
 from great_expectations.experimental.metric_repository.batch_inspector import (
@@ -39,6 +43,7 @@ from great_expectations_cloud.agent.event_handler import register_event_action
 from great_expectations_cloud.agent.exceptions import GXAgentError
 from great_expectations_cloud.agent.models import (
     CreatedResource,
+    DomainContext,
     GenerateDataQualityCheckExpectationsEvent,
 )
 from great_expectations_cloud.agent.utils import (
@@ -48,9 +53,7 @@ from great_expectations_cloud.agent.utils import (
 )
 
 if TYPE_CHECKING:
-    from great_expectations.core.suite_parameters import (
-        SuiteParameterDict,
-    )
+    from great_expectations.core.suite_parameters import SuiteParameterDict
     from great_expectations.data_context import CloudDataContext
     from great_expectations.datasource.fluent import DataAsset
 
@@ -81,13 +84,13 @@ class GenerateDataQualityCheckExpectationsAction(
         self,
         context: CloudDataContext,
         base_url: str,
-        organization_id: UUID,
+        domain_context: DomainContext,
         auth_key: str,
         metric_repository: MetricRepository | None = None,
         batch_inspector: BatchInspector | None = None,
     ):
         super().__init__(
-            context=context, base_url=base_url, organization_id=organization_id, auth_key=auth_key
+            context=context, base_url=base_url, domain_context=domain_context, auth_key=auth_key
         )
         self._metric_repository = metric_repository or MetricRepository(
             data_store=CloudDataStore(self._context)
@@ -155,6 +158,7 @@ class GenerateDataQualityCheckExpectationsAction(
                 asset_id=data_asset.id,
                 pre_existing_completeness_change_expectations=pre_existing_completeness_change_expectations,
                 created_via=created_via,
+                use_forecast=event.use_forecast,
             )
             for exp_id in completeness_change_expectation_ids:
                 created_resources.append(
@@ -201,7 +205,7 @@ class GenerateDataQualityCheckExpectationsAction(
             metric_list=[
                 MetricTypes.TABLE_COLUMNS,
                 MetricTypes.TABLE_COLUMN_TYPES,
-                MetricTypes.…,
+                MetricTypes.COLUMN_NON_NULL_COUNT,
                 MetricTypes.TABLE_ROW_COUNT,
             ],
         )
@@ -220,7 +224,7 @@ class GenerateDataQualityCheckExpectationsAction(
         """
         url = urljoin(
             base=self._base_url,
-            url=f"/api/v1/organizations/{self._organization_id}/expectations/",
+            url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/expectations/",
         )
         with create_session(access_token=self._auth_key) as session:
             response = session.get(
@@ -316,6 +320,7 @@ class GenerateDataQualityCheckExpectationsAction(
                 strict_max=strict_max,
                 min_value=min_value,
                 max_value=max_value,
+                severity=FailureSeverity.WARNING,
             )
             expectation_id = self._create_expectation_for_asset(
                 expectation=expectation, asset_id=asset_id, created_via=created_via
@@ -338,7 +343,8 @@ class GenerateDataQualityCheckExpectationsAction(
             raise RuntimeError("missing TABLE_COLUMNS metric")  # noqa: TRY003
 
         expectation = gx_expectations.ExpectTableColumnsToMatchSet(
-            column_set=table_columns_metric.value
+            column_set=table_columns_metric.value,
+            severity=FailureSeverity.WARNING,
         )
         expectation_id = self._create_expectation_for_asset(
             expectation=expectation, asset_id=asset_id, created_via=created_via
@@ -353,6 +359,7 @@ class GenerateDataQualityCheckExpectationsAction(
             dict[Any, Any]
         ],  # list of ExpectationConfiguration dicts
         created_via: str | None,
+        use_forecast: bool = False,
     ) -> list[UUID]:
         table_row_count = next(
             metric
@@ -367,11 +374,11 @@ class GenerateDataQualityCheckExpectationsAction(
             metric
             for metric in metric_run.metrics
             if isinstance(metric, ColumnMetric)
-            and metric.metric_name == MetricTypes.…
+            and metric.metric_name == MetricTypes.COLUMN_NON_NULL_COUNT
         ]
 
         if not column_null_values_metric or len(column_null_values_metric) == 0:
-            raise RuntimeError("missing …
+            raise RuntimeError("missing COLUMN_NON_NULL_COUNT metrics")  # noqa: TRY003
 
         expectation_ids = []
         # Single-expectation approach using ExpectColumnProportionOfNonNullValuesToBeBetween
@@ -382,7 +389,7 @@ class GenerateDataQualityCheckExpectationsAction(
         )
         for column in columns_missing_completeness_coverage:
             column_name = column.column
-            …
+            non_null_count = column.value
             row_count = table_row_count.value
             expectation: gx_expectations.Expectation
@@ -392,18 +399,42 @@ class GenerateDataQualityCheckExpectationsAction(
             max_param_name = f"{unique_id}_proportion_max"
 
             # Calculate non-null proportion
-            non_null_count = row_count - null_count if row_count > 0 else 0
             non_null_proportion = non_null_count / row_count if row_count > 0 else 0
 
-            if non_null_proportion == 0:
+            if use_forecast:
+                expectation = gx_expectations.ExpectColumnProportionOfNonNullValuesToBeBetween(
+                    windows=[
+                        Window(
+                            constraint_fn=ExpectationConstraintFunction.FORECAST,
+                            parameter_name=min_param_name,
+                            range=1,
+                            offset=Offset(positive=0.0, negative=0.0),
+                            strict=True,
+                        ),
+                        Window(
+                            constraint_fn=ExpectationConstraintFunction.FORECAST,
+                            parameter_name=max_param_name,
+                            range=1,
+                            offset=Offset(positive=0.0, negative=0.0),
+                            strict=True,
+                        ),
+                    ],
+                    column=column_name,
+                    min_value={"$PARAMETER": min_param_name},
+                    max_value={"$PARAMETER": max_param_name},
+                    severity=FailureSeverity.WARNING,
+                )
+            elif non_null_proportion == 0:
                 expectation = gx_expectations.ExpectColumnProportionOfNonNullValuesToBeBetween(
                     column=column_name,
                     max_value=0,
+                    severity=FailureSeverity.WARNING,
                 )
             elif non_null_proportion == 1:
                 expectation = gx_expectations.ExpectColumnProportionOfNonNullValuesToBeBetween(
                     column=column_name,
                     min_value=1,
+                    severity=FailureSeverity.WARNING,
                 )
             else:
                 # Use triangular interpolation to compute min/max values
@@ -435,6 +466,7 @@ class GenerateDataQualityCheckExpectationsAction(
                     column=column_name,
                     min_value={"$PARAMETER": min_param_name},
                     max_value={"$PARAMETER": max_param_name},
+                    severity=FailureSeverity.WARNING,
                 )
 
                 expectation_id = self._create_expectation_for_asset(
@@ -493,7 +525,7 @@ class GenerateDataQualityCheckExpectationsAction(
     ) -> UUID:
         url = urljoin(
             base=self._base_url,
-            url=f"/api/v1/organizations/{self._organization_id}/expectations/{asset_id}",
+            url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/expectations/{asset_id}",
         )
 
         expectation_payload = expectation.configuration.to_json_dict()
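The completeness branch above keys entirely off the observed non-null proportion. A condensed sketch of that selection logic follows; the function name and the fixed ±0.1 band are illustrative, since the real code derives the band via triangular interpolation and, with `use_forecast`, emits `$PARAMETER` placeholders bound to forecast windows instead:

```python
# Condensed sketch of the completeness-bound selection seen in the diff above.
def completeness_bounds(non_null_count: int, row_count: int) -> dict[str, float]:
    proportion = non_null_count / row_count if row_count > 0 else 0
    if proportion == 0:
        return {"max_value": 0}  # column observed fully null
    if proportion == 1:
        return {"min_value": 1}  # column observed fully non-null
    # Mixed case: the real code interpolates min/max around the observed
    # proportion; a fixed band stands in for that here.
    return {"min_value": max(0.0, proportion - 0.1),
            "max_value": min(1.0, proportion + 0.1)}

print(completeness_bounds(0, 100))    # {'max_value': 0}
print(completeness_bounds(100, 100))  # {'min_value': 1}
print(completeness_bounds(90, 100))   # banded around 0.9
```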
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/list_asset_names.py
RENAMED
@@ -13,9 +13,7 @@ from great_expectations_cloud.agent.actions.agent_action import (
 )
 from great_expectations_cloud.agent.actions.utils import get_asset_names
 from great_expectations_cloud.agent.event_handler import register_event_action
-from great_expectations_cloud.agent.models import (
-    ListAssetNamesEvent,
-)
+from great_expectations_cloud.agent.models import ListAssetNamesEvent
 
 
 class ListAssetNamesAction(AgentAction[ListAssetNamesEvent]):
@@ -34,7 +32,8 @@ class ListAssetNamesAction(AgentAction[ListAssetNamesEvent]):
         asset_names = get_asset_names(datasource)
 
         self._add_or_update_asset_names_list(
-            datasource_id=str(datasource.id), asset_names=asset_names
+            datasource_id=str(datasource.id),
+            asset_names=asset_names,
         )
 
         return ActionResult(
@@ -47,7 +46,7 @@ class ListAssetNamesAction(AgentAction[ListAssetNamesEvent]):
         with create_session(access_token=self._auth_key) as session:
             url = urljoin(
                 base=self._base_url,
-                url=f"/api/v1/organizations/{self._organization_id}/table-names/{datasource_id}",
+                url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/table-names/{datasource_id}",
             )
             response = session.put(
                 url=url,
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/run_metric_list_action.py
RENAMED
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 from typing import TYPE_CHECKING
-from uuid import UUID
 
 from great_expectations.experimental.metric_repository.batch_inspector import (
     BatchInspector,
@@ -21,6 +20,7 @@ from great_expectations_cloud.agent.actions import ActionResult, AgentAction
 from great_expectations_cloud.agent.event_handler import register_event_action
 from great_expectations_cloud.agent.models import (
     CreatedResource,
+    DomainContext,
     RunMetricsListEvent,
 )
 
@@ -34,13 +34,13 @@ class MetricListAction(AgentAction[RunMetricsListEvent]):
         self,
         context: CloudDataContext,
         base_url: str,
-        organization_id: UUID,
+        domain_context: DomainContext,
         auth_key: str,
         metric_repository: MetricRepository | None = None,
         batch_inspector: BatchInspector | None = None,
     ):
         super().__init__(
-            context=context, base_url=base_url, organization_id=organization_id, auth_key=auth_key
+            context=context, base_url=base_url, domain_context=domain_context, auth_key=auth_key
         )
         self._metric_repository = metric_repository or MetricRepository(
             data_store=CloudDataStore(self._context)
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/run_scheduled_checkpoint.py
RENAMED
@@ -13,9 +13,7 @@ from great_expectations_cloud.agent.actions.agent_action import (
 )
 from great_expectations_cloud.agent.actions.run_checkpoint import run_checkpoint
 from great_expectations_cloud.agent.event_handler import register_event_action
-from great_expectations_cloud.agent.models import (
-    RunScheduledCheckpointEvent,
-)
+from great_expectations_cloud.agent.models import RunScheduledCheckpointEvent
 
 if TYPE_CHECKING:
     from great_expectations.data_context import CloudDataContext
@@ -26,7 +24,7 @@ class RunScheduledCheckpointAction(AgentAction[RunScheduledCheckpointEvent]):
     def run(self, event: RunScheduledCheckpointEvent, id: str) -> ActionResult:
         expectation_parameters_url = urljoin(
             base=self._base_url,
-            url=f"/api/v1/organizations/{self._organization_id}/checkpoints/{event.checkpoint_id}/expectation-parameters",
+            url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/checkpoints/{event.checkpoint_id}/expectation-parameters",
         )
         return run_scheduled_checkpoint(
             context=self._context,
@@ -50,6 +48,7 @@ def run_scheduled_checkpoint(
             response=response,
         )
     data = response.json()
+
     try:
         expectation_parameters = (
             data["data"]["expectation_parameters"]
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/run_window_checkpoint.py
RENAMED
@@ -13,9 +13,7 @@ from great_expectations_cloud.agent.actions.agent_action import (
 )
 from great_expectations_cloud.agent.actions.run_checkpoint import run_checkpoint
 from great_expectations_cloud.agent.event_handler import register_event_action
-from great_expectations_cloud.agent.models import (
-    RunWindowCheckpointEvent,
-)
+from great_expectations_cloud.agent.models import RunWindowCheckpointEvent
 
 if TYPE_CHECKING:
     from great_expectations.data_context import CloudDataContext
@@ -26,7 +24,7 @@ class RunWindowCheckpointAction(AgentAction[RunWindowCheckpointEvent]):
     def run(self, event: RunWindowCheckpointEvent, id: str) -> ActionResult:
         expectation_parameters_url = urljoin(
             base=self._base_url,
-            url=f"/api/v1/organizations/{self._organization_id}/checkpoints/{event.checkpoint_id}/expectation-parameters",
+            url=f"/api/v1/organizations/{self._domain_context.organization_id}/workspaces/{self._domain_context.workspace_id}/checkpoints/{event.checkpoint_id}/expectation-parameters",
         )
         return run_window_checkpoint(
             self._context,
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/actions/utils.py
RENAMED
@@ -7,10 +7,13 @@ from sqlalchemy import inspect
 
 if TYPE_CHECKING:
     from sqlalchemy.engine import Inspector
+    from sqlalchemy.sql.compiler import IdentifierPreparer
 
 
 def get_asset_names(datasource: SQLDatasource) -> list[str]:
     inspector: Inspector = inspect(datasource.get_engine())
+    identifier_preparer: IdentifierPreparer = inspector.dialect.identifier_preparer
+
     if isinstance(datasource, SnowflakeDatasource) and datasource.schema_:
         # Snowflake-SQLAlchemy uses the default_schema if no schema is provided to get_table_names
         # Or if the role does not have access to the schema (it silently fails and defaults to using default_schema)
@@ -19,8 +22,14 @@ def get_asset_names(datasource: SQLDatasource) -> list[str]:
         # Also converting to list to ensure JSON serializable
         tables = list(inspector.get_table_names(schema=datasource.schema_))
         views = list(inspector.get_view_names(schema=datasource.schema_))
-        …
+        asset_names = tables + views
+    else:
+        tables = list(inspector.get_table_names())
+        views = list(inspector.get_view_names())
+        asset_names = tables + views
 
-    …
-    …
-    …
+    # the identifier preparer adds quotes when they are necessary
+    quoted_asset_names: list[str] = [
+        identifier_preparer.quote(asset_name) for asset_name in asset_names
+    ]
+    return quoted_asset_names
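The new quoting step relies on SQLAlchemy's dialect-aware `IdentifierPreparer`, which quotes a name only when the dialect requires it (mixed case, spaces, reserved words). A runnable illustration against an in-memory SQLite engine:

```python
# SQLAlchemy's IdentifierPreparer quotes identifiers only when necessary.
from sqlalchemy import create_engine, inspect

engine = create_engine("sqlite:///:memory:")
preparer = inspect(engine).dialect.identifier_preparer

print(preparer.quote("events"))      # events        -> left bare
print(preparer.quote("Mixed Case"))  # "Mixed Case"  -> quoted (case + space)
print(preparer.quote("select"))      # "select"      -> quoted (reserved word)
```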
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/agent.py
RENAMED
@@ -2,7 +2,8 @@ from __future__ import annotations
 
 import asyncio
 import logging
-import …
+import os
+import signal
 import traceback
 import warnings
 from collections import defaultdict
@@ -16,13 +17,12 @@ from uuid import UUID
 
 import orjson
 import requests
+from great_expectations import __version__, get_context
+from great_expectations.core import http
 from great_expectations.core.http import create_session
 from great_expectations.data_context.cloud_constants import CLOUD_DEFAULT_BASE_URL
-from great_expectations.data_context.data_context.context_factory import get_context
 from great_expectations.data_context.types.base import ProgressBarsConfig
-from pika.adapters.utils.connection_workflow import (
-    AMQPConnectorException,
-)
+from pika.adapters.utils.connection_workflow import AMQPConnectorException
 from pika.exceptions import (
     AMQPConnectionError,
     AMQPError,
@@ -45,9 +45,7 @@ from great_expectations_cloud.agent.config import (
     generate_config_validation_error_text,
 )
 from great_expectations_cloud.agent.constants import USER_AGENT_HEADER, HeaderName
-from great_expectations_cloud.agent.event_handler import (
-    EventHandler,
-)
+from great_expectations_cloud.agent.event_handler import EventHandler
 from great_expectations_cloud.agent.exceptions import (
     GXAgentConfigError,
     GXAgentError,
@@ -67,6 +65,7 @@ from great_expectations_cloud.agent.models import (
     AgentBaseExtraForbid,
     CreateScheduledJobAndSetJobStarted,
     CreateScheduledJobAndSetJobStartedRequest,
+    DomainContext,
     JobCompleted,
     JobStarted,
     JobStatus,
@@ -150,19 +149,6 @@
                 "great_expectations_version": great_expectations_version,
             },
         )
-        LOGGER.debug("Loading a DataContext - this might take a moment.")
-
-        with warnings.catch_warnings():
-            # suppress warnings about GX version
-            warnings.filterwarnings("ignore", message="You are using great_expectations version")
-            self._context: CloudDataContext = get_context(
-                cloud_mode=True,
-                user_agent_str=self.user_agent_str,
-            )
-        self._configure_progress_bars(data_context=self._context)
-        LOGGER.debug("DataContext is ready.")
-
-        self._set_http_session_headers(data_context=self._context)
 
         # Create a thread pool with a single worker, so we can run long-lived
         # GX processes and maintain our connection to the broker. Note that
@@ -218,7 +204,7 @@
             LOGGER.exception("The connection to GX Cloud has encountered an error.")
         except GXAgentUnrecoverableConnectionError:
             LOGGER.exception("The connection to GX Cloud has encountered an unrecoverable error.")
-            …
+            os.kill(os.getpid(), signal.SIGTERM)
         except (
             AuthenticationError,
             ProbableAuthenticationError,
@@ -263,6 +249,7 @@
                 "event_type": event_context.event.type,
                 "correlation_id": event_context.correlation_id,
                 "organization_id": self.get_organization_id(event_context),
+                "workspace_id": str(self.get_workspace_id(event_context)),
                 "schedule_id": event_context.event.schedule_id
                 if isinstance(event_context.event, ScheduledEventBase)
                 else None,
@@ -287,8 +274,23 @@
         self._current_task.add_done_callback(on_exit_callback)
 
     def get_data_context(self, event_context: EventContext) -> CloudDataContext:
-        """…
-        …
+        """Create a new CloudDataContext for each job using the event's workspace_id."""
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", message="You are using great_expectations version")
+            workspace_id = self.get_workspace_id(event_context)
+
+            LOGGER.debug("Loading a DataContext - this might take a moment.")
+
+            context: CloudDataContext = get_context(
+                cloud_mode=True,
+                user_agent_str=self.user_agent_str,
+                cloud_workspace_id=str(workspace_id),
+            )
+            self._configure_progress_bars(data_context=context)
+
+            LOGGER.debug("DataContext is ready.")
+
+            return context
 
     def get_organization_id(self, event_context: EventContext) -> UUID:
         """Helper method to get the organization ID. Overridden in GX-Runner."""
@@ -298,6 +300,13 @@
         """Helper method to get the auth key. Overridden in GX-Runner."""
         return self._get_config().gx_cloud_access_token
 
+    def get_workspace_id(self, event_context: EventContext) -> UUID:
+        """Helper method to get the workspace ID from the event."""
+        workspace_id: UUID | None = getattr(event_context.event, "workspace_id", None)
+        if workspace_id is None:
+            raise GXAgentError()
+        return workspace_id
+
     def _set_sentry_tags(self, even_context: EventContext) -> None:
         """Used by GX-Runner to set tags for Sentry logging. No-op in the Agent."""
         pass
@@ -320,14 +329,18 @@
         )
 
         org_id = self.get_organization_id(event_context)
+        workspace_id = self.get_workspace_id(event_context)
         base_url = self._get_config().gx_cloud_base_url
         auth_key = self.get_auth_key()
 
         if isinstance(event_context.event, ScheduledEventBase):
-            self._create_scheduled_job_and_set_started(event_context, org_id)
+            self._create_scheduled_job_and_set_started(event_context, org_id, workspace_id)
         else:
             self._update_status(
-                correlation_id=event_context.correlation_id, status=JobStarted(), org_id=org_id
+                correlation_id=event_context.correlation_id,
+                status=JobStarted(),
+                org_id=org_id,
+                workspace_id=workspace_id,
             )
         LOGGER.info(
             "Starting job",
@@ -335,6 +348,7 @@
             "event_type": event_context.event.type,
             "correlation_id": event_context.correlation_id,
             "organization_id": str(org_id),
+            "workspace_id": str(workspace_id),
             "schedule_id": event_context.event.schedule_id
             if isinstance(event_context.event, ScheduledEventBase)
             else None,
@@ -350,7 +364,7 @@
             id=event_context.correlation_id,
             base_url=base_url,
             auth_key=auth_key,
-            organization_id=org_id,
+            domain_context=DomainContext(organization_id=org_id, workspace_id=workspace_id),
         )
         return result
 
@@ -366,6 +380,7 @@
         # warning: this method will not be executed in the main thread
 
         org_id = self.get_organization_id(event_context)
+        workspace_id = self.get_workspace_id(event_context)
 
         # get results or errors from the thread
         error = future.exception()
@@ -385,6 +400,7 @@
             "event_type": event_context.event.type,
             "id": event_context.correlation_id,
             "organization_id": str(org_id),
+            "workspace_id": str(workspace_id),
             "schedule_id": event_context.event.schedule_id
             if isinstance(event_context.event, ScheduledEventBase)
             else None,
@@ -405,6 +421,7 @@
                 result.job_duration.total_seconds() if result.job_duration else None
             ),
             "organization_id": str(org_id),
+            "workspace_id": str(workspace_id),
             "schedule_id": event_context.event.schedule_id
             if isinstance(event_context.event, ScheduledEventBase)
             else None,
@@ -419,12 +436,16 @@
             "event_type": event_context.event.type,
             "correlation_id": event_context.correlation_id,
             "organization_id": str(org_id),
+            "workspace_id": str(workspace_id),
         },
     )
 
         try:
             self._update_status(
-                correlation_id=event_context.correlation_id, status=status, org_id=org_id
+                correlation_id=event_context.correlation_id,
+                status=status,
+                org_id=org_id,
+                workspace_id=workspace_id,
             )
         except Exception:
             LOGGER.exception(
@@ -433,6 +454,7 @@
                 "correlation_id": event_context.correlation_id,
                 "status": str(status),
                 "organization_id": str(org_id),
+                "workspace_id": str(workspace_id),
             },
         )
         # We do not want to cause an infinite loop of errors
@@ -552,7 +574,9 @@
             )
         )
 
-    def _update_status(self, correlation_id: str, status: JobStatus, org_id: UUID) -> None:
+    def _update_status(
+        self, correlation_id: str, status: JobStatus, org_id: UUID, workspace_id: UUID
+    ) -> None:
        """Update GX Cloud on the status of a job.
 
         Args:
@@ -565,11 +589,12 @@
                 "correlation_id": correlation_id,
                 "status": str(status),
                 "organization_id": str(org_id),
+                "workspace_id": str(workspace_id),
             },
         )
         agent_sessions_url = urljoin(
             self._get_config().gx_cloud_base_url,
-            f"/api/v1/organizations/{org_id}/agent-jobs/{correlation_id}",
+            f"/api/v1/organizations/{org_id}/workspaces/{workspace_id}/agent-jobs/{correlation_id}",
         )
         with create_session(access_token=self.get_auth_key()) as session:
             data = UpdateJobStatusRequest(data=status).json()
@@ -580,6 +605,7 @@
                     "correlation_id": correlation_id,
                     "status": str(status),
                     "organization_id": str(org_id),
+                    "workspace_id": str(workspace_id),
                 },
             )
             GXAgent._log_http_error(
@@ -587,7 +613,7 @@
         )
 
     def _create_scheduled_job_and_set_started(
-        self, event_context: EventContext, org_id: UUID
+        self, event_context: EventContext, org_id: UUID, workspace_id: UUID
     ) -> None:
         """Create a job in GX Cloud for scheduled events.
 
@@ -609,13 +635,14 @@
                 "correlation_id": str(event_context.correlation_id),
                 "event_type": str(event_context.event.type),
                 "organization_id": str(org_id),
+                "workspace_id": str(workspace_id),
                 "schedule_id": str(event_context.event.schedule_id),
             },
         )
 
         agent_sessions_url = urljoin(
             self._get_config().gx_cloud_base_url,
-            f"/api/v1/organizations/{org_id}/agent-jobs",
+            f"/api/v1/organizations/{org_id}/workspaces/{workspace_id}/agent-jobs",
         )
         data = CreateScheduledJobAndSetJobStarted(
             type="run_scheduled_checkpoint.received",
@@ -636,6 +663,7 @@
                     "event_type": str(event_context.event.type),
                     "organization_id": str(org_id),
                     "schedule_id": str(event_context.event.schedule_id),
+                    "workspace_id": str(workspace_id),
                 },
             )
             GXAgent._log_http_error(
@@ -658,7 +686,9 @@
         """
         Sets headers on all stores in the data context.
         """
-        from great_expectations.data_context.store.gx_cloud_store_backend import …
+        from great_expectations.data_context.store.gx_cloud_store_backend import (  # noqa: PLC0415
+            GXCloudStoreBackend,
+        )
 
         # OSS doesn't use the same session for all requests, so we need to set the header for each store
         stores = list(data_context.stores.values())
@@ -686,8 +716,6 @@
         Note: the Agent-Job-Id header value will be set for all GX Cloud request until this method is
         called again.
         """
-        from great_expectations import __version__  # noqa: PLC0415
-        from great_expectations.core import http  # noqa: PLC0415
 
         header_name = self.get_header_name()
         user_agent_header_value = self.user_agent_str
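Also notable above: on a `GXAgentUnrecoverableConnectionError` the agent now sends itself SIGTERM instead of limping on, which triggers the normal termination path and lets a supervisor (Docker, Kubernetes, systemd) restart the process. A minimal sketch of the pattern, assuming a handler registered in the main thread:

```python
# Minimal sketch of self-terminating via SIGTERM so a supervisor can restart us.
import os
import signal

def install_handler() -> None:
    # Signal handlers run in the main thread, so this works even when the
    # kill below is triggered from a worker thread.
    signal.signal(signal.SIGTERM, lambda signum, frame: print("shutting down cleanly"))

install_handler()
os.kill(os.getpid(), signal.SIGTERM)  # the pattern used on unrecoverable errors
```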
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/event_handler.py
RENAMED
@@ -15,6 +15,7 @@ from pydantic import v1 as pydantic_v1
 from great_expectations_cloud.agent.actions.unknown import UnknownEventAction
 from great_expectations_cloud.agent.exceptions import GXAgentError
 from great_expectations_cloud.agent.models import (
+    DomainContext,
     Event,
     EventType,
     UnknownEvent,
@@ -67,11 +68,11 @@
         self._context = context
 
     def get_event_action(
-        self, event: Event, base_url: str, auth_key: str, organization_id: UUID
+        self, event: Event, base_url: str, auth_key: str, domain_context: DomainContext
     ) -> AgentAction[Any]:
         """Get the action that should be run for the given event."""
 
-        if not self._check_event_organization_id(event, organization_id):
+        if not self._check_event_organization_id(event, domain_context.organization_id):
             # Making message more generic
             raise GXAgentError("Unable to process job. Invalid input.")  # noqa: TRY003
 
@@ -84,17 +85,17 @@
         return action_class(
             context=self._context,
             base_url=base_url,
-            organization_id=organization_id,
+            domain_context=domain_context,
             auth_key=auth_key,
         )
 
-    def handle_event(
-        self, event: Event, id: str, base_url: str, auth_key: str, organization_id: UUID
+    def handle_event(
+        self, event: Event, id: str, base_url: str, auth_key: str, domain_context: DomainContext
     ) -> ActionResult:
-        start_time = datetime.now(tz=timezone.utc)
         """Transform an Event into an ActionResult."""
+        start_time = datetime.now(tz=timezone.utc)
         action = self.get_event_action(
-            event=event, base_url=base_url, auth_key=auth_key, organization_id=organization_id
+            event=event, base_url=base_url, auth_key=auth_key, domain_context=domain_context
        )
         LOGGER.info(f"Handling event: {event.type} -> {action.__class__.__name__}")
         action_result = action.run(event=event, id=id)
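For orientation, `get_event_action` resolves the action class from a registry populated by `register_event_action` and instantiates it with the shared constructor arguments, now including `domain_context`. A heavily simplified sketch of that dispatch; the registry structure and `register_event_action` signature are condensed here, and the event type string is hypothetical:

```python
# Heavily simplified event -> action dispatch; names condensed from the diff.
from typing import Any

_REGISTRY: dict[str, type] = {}

def register_event_action(event_type: str, action_class: type) -> None:
    _REGISTRY[event_type] = action_class

class ExampleAction:  # stands in for e.g. ListAssetNamesAction
    def __init__(self, context: Any, base_url: str, domain_context: Any, auth_key: str):
        self._domain_context = domain_context

register_event_action("example_event.received", ExampleAction)  # hypothetical type

def get_event_action(event_type: str, **kwargs: Any) -> Any:
    return _REGISTRY[event_type](**kwargs)  # all actions share this constructor shape

action = get_event_action(
    "example_event.received",
    context=None, base_url="https://example.invalid", domain_context=None, auth_key="token",
)
print(type(action).__name__)  # ExampleAction
```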
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/great_expectations_cloud/agent/models.py
RENAMED
@@ -24,6 +24,18 @@ def all_subclasses(cls: type) -> list[type]:
     return all_sub_cls
 
 
+class DomainContext(BaseModel):
+    """
+    Encapsulates domain-related context information.
+
+    This dataclass consolidates organization_id and workspace_id to reduce
+    parameter proliferation and improve code maintainability.
+    """
+
+    organization_id: UUID
+    workspace_id: UUID
+
+
 class AgentBaseExtraForbid(BaseModel):
     class Config:
         # 2024-03-04: ZEL-501 Strictly enforce models for handling outdated APIs
@@ -39,6 +51,7 @@ class AgentBaseExtraIgnore(BaseModel):
 class EventBase(AgentBaseExtraIgnore):
     type: str
     organization_id: Optional[UUID] = None  # noqa: UP045
+    workspace_id: UUID
 
 
 class ScheduledEventBase(EventBase):
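Because `DomainContext` and `EventBase` are pydantic (v1) models, UUIDs arriving as strings on the wire are coerced and validated at parse time, and the new required `workspace_id` field makes an event without one fail validation outright. A small sketch with made-up IDs:

```python
# Sketch: pydantic v1 coerces and validates the UUID fields declared above.
from uuid import UUID
from pydantic.v1 import BaseModel, ValidationError

class DomainContext(BaseModel):
    organization_id: UUID
    workspace_id: UUID

ctx = DomainContext(
    organization_id="00000000-0000-0000-0000-000000000001",  # made-up IDs
    workspace_id="00000000-0000-0000-0000-000000000002",
)
print(type(ctx.workspace_id))  # <class 'uuid.UUID'> -- coerced from str

try:
    DomainContext(organization_id="00000000-0000-0000-0000-000000000001")
except ValidationError as err:
    print("workspace_id is required:", err.errors()[0]["loc"])
```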
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/pyproject.toml
RENAMED
@@ -1,7 +1,6 @@
 [tool.poetry]
 name = "great_expectations_cloud"
-version = "20250717.0.dev0"
-…
+version = "20251211.0.dev1"
 description = "Great Expectations Cloud"
 authors = ["The Great Expectations Team <team@greatexpectations.io>"]
 repository = "https://github.com/great-expectations/cloud"
@@ -23,8 +22,8 @@ include = [
 ]
 
 [tool.poetry.dependencies]
-python = ">=3.11,<3.12"
-great-expectations = { version = "1.…
+python = ">=3.11.4,<3.12"
+great-expectations = { version = "1.9.3", extras = ["gx-redshift", "snowflake", "postgresql", "databricks", "mssql", "trino"] }
 pydantic = ">=2.8.1,<3"
 pika = "^1.3.1"
 setuptools = "80.9.0"
@@ -38,16 +37,16 @@ sqlalchemy = ">=2.0"
 [tool.poetry.group.dev.dependencies]
 coverage = { extras = ["toml"], version = "^7.5.1" }
 freezegun = "^1.4.0"
-invoke = "^2.2.…
-mypy = "1.…
+invoke = "^2.2.1"
+mypy = "1.19.0"
 pre-commit = ">=3.3.3,<5.0.0"
 pyfakefs = "^5.4.1"
-pytest = ">=7.4,<…
+pytest = ">=7.4,<10.0"
 pytest-cov = ">=5"
 pytest-icdiff = "*"
 pytest-mock = "*"
 responses = ">=0.23.1,<0.26.0"
-ruff = "0.…
+ruff = "0.14.8"
 tenacity = ">=8.2.3,<10.0.0"
 tomlkit = ">=0.12.1,<0.14.0"
 types-requests = "^2.31"
{great_expectations_cloud-20250717.0.dev0 → great_expectations_cloud-20251211.0.dev1}/LICENSE
RENAMED
File without changes