acryl-datahub 1.1.0.3rc1__py3-none-any.whl → 1.1.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of acryl-datahub might be problematic.

Files changed (68)
  1. {acryl_datahub-1.1.0.3rc1.dist-info → acryl_datahub-1.1.0.4.dist-info}/METADATA +2474 -2474
  2. {acryl_datahub-1.1.0.3rc1.dist-info → acryl_datahub-1.1.0.4.dist-info}/RECORD +68 -68
  3. datahub/_version.py +1 -1
  4. datahub/cli/check_cli.py +27 -0
  5. datahub/cli/delete_cli.py +117 -19
  6. datahub/emitter/rest_emitter.py +18 -1
  7. datahub/ingestion/api/source.py +2 -0
  8. datahub/ingestion/glossary/classification_mixin.py +5 -0
  9. datahub/ingestion/graph/client.py +42 -2
  10. datahub/ingestion/source/bigquery_v2/bigquery.py +18 -0
  11. datahub/ingestion/source/bigquery_v2/common.py +1 -1
  12. datahub/ingestion/source/dbt/dbt_cloud.py +3 -0
  13. datahub/ingestion/source/dbt/dbt_common.py +3 -1
  14. datahub/ingestion/source/dbt/dbt_core.py +3 -0
  15. datahub/ingestion/source/dremio/dremio_api.py +98 -68
  16. datahub/ingestion/source/dremio/dremio_config.py +2 -0
  17. datahub/ingestion/source/dremio/dremio_reporting.py +23 -2
  18. datahub/ingestion/source/dremio/dremio_source.py +90 -77
  19. datahub/ingestion/source/dremio/dremio_sql_queries.py +82 -21
  20. datahub/ingestion/source/file.py +3 -0
  21. datahub/ingestion/source/ge_data_profiler.py +48 -8
  22. datahub/ingestion/source/ge_profiling_config.py +11 -0
  23. datahub/ingestion/source/iceberg/iceberg.py +3 -1
  24. datahub/ingestion/source/kafka/kafka.py +16 -0
  25. datahub/ingestion/source/looker/looker_source.py +1 -0
  26. datahub/ingestion/source/powerbi/powerbi.py +1 -0
  27. datahub/ingestion/source/qlik_sense/qlik_sense.py +1 -0
  28. datahub/ingestion/source/redshift/redshift.py +21 -1
  29. datahub/ingestion/source/sac/sac.py +3 -1
  30. datahub/ingestion/source/sigma/sigma.py +1 -0
  31. datahub/ingestion/source/snowflake/snowflake_config.py +3 -6
  32. datahub/ingestion/source/snowflake/snowflake_summary.py +5 -0
  33. datahub/ingestion/source/snowflake/snowflake_utils.py +2 -7
  34. datahub/ingestion/source/snowflake/snowflake_v2.py +2 -0
  35. datahub/ingestion/source/sql/clickhouse.py +3 -1
  36. datahub/ingestion/source/sql/cockroachdb.py +0 -1
  37. datahub/ingestion/source/sql/hana.py +3 -1
  38. datahub/ingestion/source/sql/hive_metastore.py +3 -1
  39. datahub/ingestion/source/sql/mariadb.py +0 -1
  40. datahub/ingestion/source/sql/mssql/source.py +8 -1
  41. datahub/ingestion/source/sql/mysql.py +0 -1
  42. datahub/ingestion/source/sql/postgres.py +0 -1
  43. datahub/ingestion/source/sql/sql_common.py +12 -0
  44. datahub/ingestion/source/superset.py +1 -1
  45. datahub/ingestion/source/tableau/tableau.py +1 -0
  46. datahub/ingestion/source/unity/source.py +1 -0
  47. datahub/ingestion/source/usage/clickhouse_usage.py +4 -1
  48. datahub/ingestion/source/usage/starburst_trino_usage.py +3 -0
  49. datahub/metadata/_internal_schema_classes.py +25 -0
  50. datahub/metadata/schema.avsc +18 -1
  51. datahub/metadata/schemas/ContainerProperties.avsc +6 -0
  52. datahub/metadata/schemas/DataFlowInfo.avsc +6 -0
  53. datahub/metadata/schemas/DataHubPolicyInfo.avsc +12 -1
  54. datahub/metadata/schemas/DataJobInfo.avsc +6 -0
  55. datahub/metadata/schemas/DataProcessKey.avsc +6 -0
  56. datahub/metadata/schemas/DatasetKey.avsc +6 -0
  57. datahub/metadata/schemas/IcebergWarehouseInfo.avsc +6 -0
  58. datahub/metadata/schemas/MLModelDeploymentKey.avsc +6 -0
  59. datahub/metadata/schemas/MLModelGroupKey.avsc +6 -0
  60. datahub/metadata/schemas/MLModelKey.avsc +6 -0
  61. datahub/metadata/schemas/MetadataChangeEvent.avsc +18 -1
  62. datahub/sdk/main_client.py +9 -10
  63. datahub/sql_parsing/sqlglot_lineage.py +22 -0
  64. datahub/utilities/stats_collections.py +4 -0
  65. {acryl_datahub-1.1.0.3rc1.dist-info → acryl_datahub-1.1.0.4.dist-info}/WHEEL +0 -0
  66. {acryl_datahub-1.1.0.3rc1.dist-info → acryl_datahub-1.1.0.4.dist-info}/entry_points.txt +0 -0
  67. {acryl_datahub-1.1.0.3rc1.dist-info → acryl_datahub-1.1.0.4.dist-info}/licenses/LICENSE +0 -0
  68. {acryl_datahub-1.1.0.3rc1.dist-info → acryl_datahub-1.1.0.4.dist-info}/top_level.txt +0 -0

datahub/ingestion/source/redshift/redshift.py

@@ -10,6 +10,7 @@ import humanfriendly
 import pydantic
 import redshift_connector
 
+from datahub.configuration.common import AllowDenyPattern
 from datahub.configuration.pattern_utils import is_schema_allowed
 from datahub.emitter.mce_builder import (
     make_data_platform_urn,

@@ -140,12 +141,15 @@ logger: logging.Logger = logging.getLogger(__name__)
     SourceCapability.USAGE_STATS,
     "Enabled by default, can be disabled via configuration `include_usage_statistics`",
 )
-@capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion")
+@capability(
+    SourceCapability.DELETION_DETECTION, "Enabled by default via stateful ingestion"
+)
 @capability(
     SourceCapability.CLASSIFICATION,
     "Optionally enabled via `classification.enabled`",
     supported=True,
 )
+@capability(SourceCapability.TEST_CONNECTION, "Enabled by default")
 class RedshiftSource(StatefulIngestionSourceBase, TestableSource):
     """
     This plugin extracts the following:

@@ -354,7 +358,23 @@ class RedshiftSource(StatefulIngestionSourceBase, TestableSource):
             ).workunit_processor,
         ]
 
+    def _warn_deprecated_configs(self):
+        if (
+            self.config.match_fully_qualified_names is not None
+            and not self.config.match_fully_qualified_names
+            and self.config.schema_pattern is not None
+            and self.config.schema_pattern != AllowDenyPattern.allow_all()
+        ):
+            self.report.report_warning(
+                message="Please update `schema_pattern` to match against fully qualified schema name `<database_name>.<schema_name>` and set config `match_fully_qualified_names : True`."
+                "Current default `match_fully_qualified_names: False` is only to maintain backward compatibility. "
+                "The config option `match_fully_qualified_names` will be removed in future and the default behavior will be like `match_fully_qualified_names: True`.",
+                context="Config option deprecation warning",
+                title="Config option deprecation warning",
+            )
+
     def get_workunits_internal(self) -> Iterable[Union[MetadataWorkUnit, SqlWorkUnit]]:
+        self._warn_deprecated_configs()
         connection = self._try_get_redshift_connection(self.config)
 
         if connection is None:
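
The deprecation warning added above tells users to move to fully qualified schema filters. A minimal sketch of the forward-compatible settings it asks for, written as the recipe's `source.config` block in Python dict form (connection values are hypothetical):

    redshift_config = {
        "host_port": "my-cluster.example.com:5439",  # hypothetical endpoint
        "database": "analytics",
        # Qualify schema_pattern entries as <database_name>.<schema_name> and opt
        # in explicitly, so behavior is unchanged once the old default is removed.
        "match_fully_qualified_names": True,
        "schema_pattern": {"allow": ["analytics\\.public"]},
    }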

datahub/ingestion/source/sac/sac.py

@@ -178,7 +178,9 @@ class SACSourceReport(StaleEntityRemovalSourceReport):
     SourceCapability.LINEAGE_COARSE,
     "Enabled by default (only for Live Data Models)",
 )
-@capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion")
+@capability(
+    SourceCapability.DELETION_DETECTION, "Enabled by default via stateful ingestion"
+)
 @capability(
     SourceCapability.SCHEMA_METADATA,
     "Enabled by default (only for Import Data Models)",

datahub/ingestion/source/sigma/sigma.py

@@ -105,6 +105,7 @@ logger = logging.getLogger(__name__)
     SourceCapability.OWNERSHIP,
     "Enabled by default, configured using `ingest_owner`",
 )
+@capability(SourceCapability.TEST_CONNECTION, "Enabled by default")
 class SigmaSource(StatefulIngestionSourceBase, TestableSource):
     """
     This plugin extracts the following:

datahub/ingestion/source/snowflake/snowflake_config.py

@@ -154,14 +154,11 @@ class SnowflakeIdentifierConfig(
 
     email_domain: Optional[str] = pydantic.Field(
         default=None,
-        description="Email domain of your organization so users can be displayed on UI appropriately.",
+        description="Email domain of your organization so users can be displayed on UI appropriately. This is used only if we cannot infer email ID.",
     )
 
-    email_as_user_identifier: bool = Field(
-        default=True,
-        description="Format user urns as an email, if the snowflake user's email is set. If `email_domain` is "
-        "provided, generates email addresses for snowflake users with unset emails, based on their "
-        "username.",
+    _email_as_user_identifier = pydantic_removed_field(
+        "email_as_user_identifier",
     )
 
 

datahub/ingestion/source/snowflake/snowflake_summary.py

@@ -20,6 +20,7 @@ from datahub.ingestion.source.snowflake.snowflake_schema_gen import (
     SnowflakeSchemaGenerator,
 )
 from datahub.ingestion.source.snowflake.snowflake_utils import (
+    SnowflakeFilter,
     SnowflakeIdentifierBuilder,
 )
 from datahub.ingestion.source_report.time_window import BaseTimeWindowReport

@@ -81,6 +82,10 @@ class SnowflakeSummarySource(Source):
             profiler=None,
             aggregator=None,
             snowsight_url_builder=None,
+            filters=SnowflakeFilter(
+                filter_config=self.config,
+                structured_reporter=self.report,
+            ),
         )
 
         # Databases.

datahub/ingestion/source/snowflake/snowflake_utils.py

@@ -325,15 +325,10 @@ class SnowflakeIdentifierBuilder:
         user_email: Optional[str],
     ) -> str:
         if user_email:
-            return self.snowflake_identifier(
-                user_email
-                if self.identifier_config.email_as_user_identifier is True
-                else user_email.split("@")[0]
-            )
+            return self.snowflake_identifier(user_email)
         return self.snowflake_identifier(
             f"{user_name}@{self.identifier_config.email_domain}"
-            if self.identifier_config.email_as_user_identifier is True
-            and self.identifier_config.email_domain is not None
+            if self.identifier_config.email_domain is not None
             else user_name
         )
 
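
With `email_as_user_identifier` removed, the resolution above becomes unconditional. A standalone sketch of the new order (illustrative names; lower-casing stands in for `snowflake_identifier()`):

    from typing import Optional

    def resolve_user_identifier(
        user_name: str, user_email: Optional[str], email_domain: Optional[str]
    ) -> str:
        # 1. Prefer the email Snowflake already knows about.
        if user_email:
            return user_email.lower()
        # 2. Otherwise synthesize one from the configured email_domain.
        if email_domain is not None:
            return f"{user_name}@{email_domain}".lower()
        # 3. Fall back to the bare username.
        return user_name.lower()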

datahub/ingestion/source/snowflake/snowflake_v2.py

@@ -131,6 +131,7 @@ logger: logging.Logger = logging.getLogger(__name__)
     "Optionally enabled via `classification.enabled`",
     supported=True,
 )
+@capability(SourceCapability.TEST_CONNECTION, "Enabled by default")
 class SnowflakeV2Source(
     SnowflakeCommonMixin,
     StatefulIngestionSourceBase,

@@ -311,6 +312,7 @@ class SnowflakeV2Source(
                 SourceCapability.PLATFORM_INSTANCE,
                 SourceCapability.DOMAINS,
                 SourceCapability.DELETION_DETECTION,
+                SourceCapability.TEST_CONNECTION,
             )
         ]
 
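
The TEST_CONNECTION capability advertised here is backed by the `TestableSource` interface. A hedged sketch of exercising it directly (credential values are hypothetical, and the exact report fields should be treated as assumptions):

    from datahub.ingestion.source.snowflake.snowflake_v2 import SnowflakeV2Source

    report = SnowflakeV2Source.test_connection(
        {
            "account_id": "abc12345",  # hypothetical credentials
            "username": "datahub",
            "password": "...",
        }
    )
    print(report.basic_connectivity)  # overall reachability
    print(report.capability_report)   # per-capability results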

datahub/ingestion/source/sql/clickhouse.py

@@ -379,7 +379,9 @@ clickhouse_datetime_format = "%Y-%m-%d %H:%M:%S"
 @platform_name("ClickHouse")
 @config_class(ClickHouseConfig)
 @support_status(SupportStatus.CERTIFIED)
-@capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion")
+@capability(
+    SourceCapability.DELETION_DETECTION, "Enabled by default via stateful ingestion"
+)
 @capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration")
 class ClickHouseSource(TwoTierSQLAlchemySource):
     """

datahub/ingestion/source/sql/cockroachdb.py

@@ -26,7 +26,6 @@ class CockroachDBConfig(PostgresConfig):
 @capability(SourceCapability.PLATFORM_INSTANCE, "Enabled by default")
 @capability(SourceCapability.DOMAINS, "Supported via the `domain` config field")
 @capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration")
-@capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion")
 class CockroachDBSource(PostgresSource):
     config: CockroachDBConfig
 

datahub/ingestion/source/sql/hana.py

@@ -27,7 +27,9 @@ class HanaConfig(BasicSQLAlchemyConfig):
 @capability(SourceCapability.PLATFORM_INSTANCE, "Enabled by default")
 @capability(SourceCapability.DOMAINS, "Supported via the `domain` config field")
 @capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration")
-@capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion")
+@capability(
+    SourceCapability.DELETION_DETECTION, "Enabled by default via stateful ingestion"
+)
 class HanaSource(SQLAlchemySource):
     def __init__(self, config: HanaConfig, ctx: PipelineContext):
         super().__init__(config, ctx, "hana")

datahub/ingestion/source/sql/hive_metastore.py

@@ -161,7 +161,9 @@ class HiveMetastore(BasicSQLAlchemyConfig):
 @platform_name("Hive Metastore")
 @config_class(HiveMetastore)
 @support_status(SupportStatus.CERTIFIED)
-@capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion")
+@capability(
+    SourceCapability.DELETION_DETECTION, "Enabled by default via stateful ingestion"
+)
 @capability(SourceCapability.DATA_PROFILING, "Not Supported", False)
 @capability(SourceCapability.CLASSIFICATION, "Not Supported", False)
 @capability(

datahub/ingestion/source/sql/mariadb.py

@@ -15,7 +15,6 @@ from datahub.ingestion.source.sql.mysql import MySQLConfig, MySQLSource
 @capability(SourceCapability.PLATFORM_INSTANCE, "Enabled by default")
 @capability(SourceCapability.DOMAINS, "Supported via the `domain` config field")
 @capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration")
-@capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion")
 class MariaDBSource(MySQLSource):
     def get_platform(self):
         return "mariadb"

datahub/ingestion/source/sql/mssql/source.py

@@ -174,7 +174,14 @@ class SQLServerConfig(BasicSQLAlchemyConfig):
 @capability(SourceCapability.DOMAINS, "Supported via the `domain` config field")
 @capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration")
 @capability(SourceCapability.DESCRIPTIONS, "Enabled by default")
-@capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion")
+@capability(
+    SourceCapability.LINEAGE_COARSE,
+    "Enabled by default to get lineage for stored procedures via `include_lineage` and for views via `include_view_lineage`",
+)
+@capability(
+    SourceCapability.LINEAGE_FINE,
+    "Enabled by default to get lineage for stored procedures via `include_lineage` and for views via `include_view_column_lineage`",
+)
 class SQLServerSource(SQLAlchemySource):
     """
     This plugin extracts the following:
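
The two capability entries above name three config flags. A recipe fragment showing them together (connection values are hypothetical; flag names are taken from the capability docs in the diff):

    mssql_config = {
        "host_port": "sqlserver.example.com:1433",
        "database": "Sales",
        "include_lineage": True,              # stored-procedure lineage (coarse and fine)
        "include_view_lineage": True,         # view -> upstream table lineage
        "include_view_column_lineage": True,  # column-level view lineage
    }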

datahub/ingestion/source/sql/mysql.py

@@ -65,7 +65,6 @@ class MySQLConfig(MySQLConnectionConfig, TwoTierSQLAlchemyConfig):
 @capability(SourceCapability.PLATFORM_INSTANCE, "Enabled by default")
 @capability(SourceCapability.DOMAINS, "Supported via the `domain` config field")
 @capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration")
-@capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion")
 class MySQLSource(TwoTierSQLAlchemySource):
     """
     This plugin extracts the following:

datahub/ingestion/source/sql/postgres.py

@@ -131,7 +131,6 @@ class PostgresConfig(BasePostgresConfig):
 @capability(SourceCapability.DOMAINS, "Enabled by default")
 @capability(SourceCapability.PLATFORM_INSTANCE, "Enabled by default")
 @capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration")
-@capability(SourceCapability.LINEAGE_COARSE, "Optionally enabled via configuration")
 class PostgresSource(SQLAlchemySource):
     """
     This plugin extracts the following:

datahub/ingestion/source/sql/sql_common.py

@@ -302,6 +302,18 @@ class ProfileMetadata:
     "Enabled by default",
     supported=True,
 )
+@capability(
+    SourceCapability.LINEAGE_COARSE,
+    "Enabled by default to get lineage for views via `include_view_lineage`",
+)
+@capability(
+    SourceCapability.LINEAGE_FINE,
+    "Enabled by default to get lineage for views via `include_view_column_lineage`",
+)
+@capability(SourceCapability.TEST_CONNECTION, "Enabled by default")
+@capability(
+    SourceCapability.DELETION_DETECTION, "Enabled by default via stateful ingestion"
+)
 class SQLAlchemySource(StatefulIngestionSourceBase, TestableSource):
     """A Base class for all SQL Sources that use SQLAlchemy to extend"""
 
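
Several hunks in this release reword DELETION_DETECTION as "Enabled by default via stateful ingestion". For reference, a sketch of the stateful-ingestion block that drives it in a recipe (values illustrative):

    source_config = {
        "stateful_ingestion": {
            "enabled": True,
            # Soft-delete entities that disappeared since the last run.
            "remove_stale_metadata": True,
        }
    }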

datahub/ingestion/source/superset.py

@@ -1087,7 +1087,7 @@ class SupersetSource(StatefulIngestionSourceBase):
         datasource_urn = self.get_datasource_urn_from_id(
             dataset_response, self.platform
         )
-        dataset_url = f"{self.config.display_uri}{dataset_response.get('result', {}).get('url', '')}"
+        dataset_url = f"{self.config.display_uri}/explore/?datasource_type=table&datasource_id={dataset.id}"
 
         modified_actor = f"urn:li:corpuser:{self.owner_info.get((dataset_data.get('changed_by') or {}).get('id', -1), 'unknown')}"
         now = datetime.now().strftime("%I:%M%p on %B %d, %Y")
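
Illustrative only: the URL shape before and after this change, for a hypothetical `display_uri` and dataset id:

    display_uri = "https://superset.example.com"
    dataset_id = 42

    # Before: appended the `url` field from the dataset API response.
    # After: always points at the Explore view for the dataset.
    dataset_url = f"{display_uri}/explore/?datasource_type=table&datasource_id={dataset_id}"
    # -> https://superset.example.com/explore/?datasource_type=table&datasource_id=42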

datahub/ingestion/source/tableau/tableau.py

@@ -879,6 +879,7 @@ def report_user_role(report: TableauSourceReport, server: Server) -> None:
     SourceCapability.LINEAGE_FINE,
     "Enabled by default, configure using `extract_column_level_lineage`",
 )
+@capability(SourceCapability.TEST_CONNECTION, "Enabled by default")
 class TableauSource(StatefulIngestionSourceBase, TestableSource):
     platform = "tableau"
 

datahub/ingestion/source/unity/source.py

@@ -162,6 +162,7 @@ logger: logging.Logger = logging.getLogger(__name__)
     "Optionally enabled via `stateful_ingestion.remove_stale_metadata`",
     supported=True,
 )
+@capability(SourceCapability.TEST_CONNECTION, "Enabled by default")
 @support_status(SupportStatus.INCUBATING)
 class UnityCatalogSource(StatefulIngestionSourceBase, TestableSource):
     """

datahub/ingestion/source/usage/clickhouse_usage.py

@@ -85,8 +85,11 @@ class ClickHouseUsageConfig(ClickHouseConfig, BaseUsageConfig, EnvConfigMixin):
 @platform_name("ClickHouse")
 @config_class(ClickHouseUsageConfig)
 @support_status(SupportStatus.CERTIFIED)
-@capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion")
+@capability(
+    SourceCapability.DELETION_DETECTION, "Enabled by default via stateful ingestion"
+)
 @capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration")
+@capability(SourceCapability.USAGE_STATS, "Enabled by default to get usage stats")
 @dataclasses.dataclass
 class ClickHouseUsageSource(Source):
     """

datahub/ingestion/source/usage/starburst_trino_usage.py

@@ -15,7 +15,9 @@ from sqlalchemy.engine import Engine
 import datahub.emitter.mce_builder as builder
 from datahub.configuration.time_window_config import get_time_bucket
 from datahub.ingestion.api.decorators import (
+    SourceCapability,
     SupportStatus,
+    capability,
     config_class,
     platform_name,
     support_status,

@@ -112,6 +114,7 @@ class TrinoUsageReport(SourceReport):
 @platform_name("Trino")
 @config_class(TrinoUsageConfig)
 @support_status(SupportStatus.CERTIFIED)
+@capability(SourceCapability.USAGE_STATS, "Enabled by default to get usage stats")
 @dataclasses.dataclass
 class TrinoUsageSource(Source):
     """

datahub/metadata/_internal_schema_classes.py

@@ -4590,6 +4590,15 @@ class FabricTypeClass(object):
     RVW = "RVW"
     """Designates review fabrics"""
 
+    PRD = "PRD"
+    """Alternative Prod spelling"""
+
+    TST = "TST"
+    """Alternative Test spelling"""
+
+    SIT = "SIT"
+    """System Integration Testing"""
+
     SANDBOX = "SANDBOX"
     """Designates sandbox fabrics"""
 
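
A small sketch using one of the new fabric symbols; `make_dataset_urn` accepts the environment as its `env` argument, so PRD, TST, and SIT can be used anywhere a FabricType string is expected:

    from datahub.emitter.mce_builder import make_dataset_urn
    from datahub.metadata.schema_classes import FabricTypeClass

    urn = make_dataset_urn(
        platform="snowflake",
        name="db.schema.table",
        env=FabricTypeClass.PRD,  # new symbol; TST and SIT work the same way
    )
    # -> urn:li:dataset:(urn:li:dataPlatform:snowflake,db.schema.table,PRD)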

datahub/metadata/_internal_schema_classes.py (continued)

@@ -21504,6 +21513,7 @@ class DataHubResourceFilterClass(DictWrapper):
         resources: Union[None, List[str]]=None,
         allResources: Optional[bool]=None,
         filter: Union[None, "PolicyMatchFilterClass"]=None,
+        privilegeConstraints: Union[None, "PolicyMatchFilterClass"]=None,
     ):
         super().__init__()
 

@@ -21515,12 +21525,14 @@ class DataHubResourceFilterClass(DictWrapper):
         else:
             self.allResources = allResources
         self.filter = filter
+        self.privilegeConstraints = privilegeConstraints
 
     def _restore_defaults(self) -> None:
         self.type = self.RECORD_SCHEMA.fields_dict["type"].default
         self.resources = self.RECORD_SCHEMA.fields_dict["resources"].default
         self.allResources = self.RECORD_SCHEMA.fields_dict["allResources"].default
         self.filter = self.RECORD_SCHEMA.fields_dict["filter"].default
+        self.privilegeConstraints = self.RECORD_SCHEMA.fields_dict["privilegeConstraints"].default
 
 
     @property

@@ -21565,6 +21577,16 @@ class DataHubResourceFilterClass(DictWrapper):
         self._inner_dict['filter'] = value
 
 
+    @property
+    def privilegeConstraints(self) -> Union[None, "PolicyMatchFilterClass"]:
+        """Constraints around what sub-resources operations are allowed to modify, i.e. NOT_EQUALS - cannot modify a particular defined tag, EQUALS - can only modify a particular defined tag, STARTS_WITH - can only modify a tag starting with xyz"""
+        return self._inner_dict.get('privilegeConstraints')  # type: ignore
+
+    @privilegeConstraints.setter
+    def privilegeConstraints(self, value: Union[None, "PolicyMatchFilterClass"]) -> None:
+        self._inner_dict['privilegeConstraints'] = value
+
+
 class DataHubRoleInfoClass(_Aspect):
     """Information about a DataHub Role."""
 

@@ -21633,6 +21655,9 @@ class PolicyMatchConditionClass(object):
     STARTS_WITH = "STARTS_WITH"
     """Whether the field value starts with the value"""
 
+    NOT_EQUALS = "NOT_EQUALS"
+    """Whether the field does not match the value"""
+
 
 
 class PolicyMatchCriterionClass(DictWrapper):
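
Taken together, the new NOT_EQUALS condition and the privilegeConstraints field let a policy carve out sub-resources it may not touch. A hedged construction sketch (the criterion field and values are hypothetical; valid values are defined server-side):

    from datahub.metadata.schema_classes import (
        DataHubResourceFilterClass,
        PolicyMatchConditionClass,
        PolicyMatchCriterionClass,
        PolicyMatchFilterClass,
    )

    # Intent: "may edit tags, except the pii tag".
    constraints = PolicyMatchFilterClass(
        criteria=[
            PolicyMatchCriterionClass(
                field="TAG",  # hypothetical criterion field
                values=["urn:li:tag:pii"],
                condition=PolicyMatchConditionClass.NOT_EQUALS,
            )
        ]
    )
    resource_filter = DataHubResourceFilterClass(privilegeConstraints=constraints)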

datahub/metadata/schema.avsc

@@ -9502,13 +9502,16 @@
 "DEV": "Designates development fabrics",
 "EI": "Designates early-integration fabrics",
 "NON_PROD": "Designates non-production fabrics",
+"PRD": "Alternative Prod spelling",
 "PRE": "Designates pre-production fabrics",
 "PROD": "Designates production fabrics",
 "QA": "Designates quality assurance fabrics",
 "RVW": "Designates review fabrics",
 "SANDBOX": "Designates sandbox fabrics",
+"SIT": "System Integration Testing",
 "STG": "Designates staging fabrics",
 "TEST": "Designates testing fabrics",
+"TST": "Alternative Test spelling",
 "UAT": "Designates user acceptance testing fabrics"
 },
 "name": "FabricType",

@@ -9525,6 +9528,9 @@
 "PROD",
 "CORP",
 "RVW",
+"PRD",
+"TST",
+"SIT",
 "SANDBOX"
 ],
 "doc": "Fabric group type"

@@ -16441,13 +16447,15 @@
 "type": "enum",
 "symbolDocs": {
     "EQUALS": "Whether the field matches the value",
+    "NOT_EQUALS": "Whether the field does not match the value",
     "STARTS_WITH": "Whether the field value starts with the value"
 },
 "name": "PolicyMatchCondition",
 "namespace": "com.linkedin.pegasus2avro.policy",
 "symbols": [
     "EQUALS",
-    "STARTS_WITH"
+    "STARTS_WITH",
+    "NOT_EQUALS"
 ],
 "doc": "The matching condition in a filter criterion"
 },

@@ -16469,6 +16477,15 @@
 "name": "filter",
 "default": null,
 "doc": "Filter to apply privileges to"
+},
+{
+    "type": [
+        "null",
+        "com.linkedin.pegasus2avro.policy.PolicyMatchFilter"
+    ],
+    "name": "privilegeConstraints",
+    "default": null,
+    "doc": "Constraints around what sub-resources operations are allowed to modify, i.e. NOT_EQUALS - cannot modify a particular defined tag, EQUALS - can only modify a particular defined tag, STARTS_WITH - can only modify a tag starting with xyz"
 }
 ],
 "doc": "Information used to filter DataHub resource."

datahub/metadata/schemas/ContainerProperties.avsc

@@ -93,13 +93,16 @@
 "DEV": "Designates development fabrics",
 "EI": "Designates early-integration fabrics",
 "NON_PROD": "Designates non-production fabrics",
+"PRD": "Alternative Prod spelling",
 "PRE": "Designates pre-production fabrics",
 "PROD": "Designates production fabrics",
 "QA": "Designates quality assurance fabrics",
 "RVW": "Designates review fabrics",
 "SANDBOX": "Designates sandbox fabrics",
+"SIT": "System Integration Testing",
 "STG": "Designates staging fabrics",
 "TEST": "Designates testing fabrics",
+"TST": "Alternative Test spelling",
 "UAT": "Designates user acceptance testing fabrics"
 },
 "name": "FabricType",

@@ -116,6 +119,9 @@
 "PROD",
 "CORP",
 "RVW",
+"PRD",
+"TST",
+"SIT",
 "SANDBOX"
 ],
 "doc": "Fabric group type"

datahub/metadata/schemas/DataFlowInfo.avsc

@@ -147,13 +147,16 @@
 "DEV": "Designates development fabrics",
 "EI": "Designates early-integration fabrics",
 "NON_PROD": "Designates non-production fabrics",
+"PRD": "Alternative Prod spelling",
 "PRE": "Designates pre-production fabrics",
 "PROD": "Designates production fabrics",
 "QA": "Designates quality assurance fabrics",
 "RVW": "Designates review fabrics",
 "SANDBOX": "Designates sandbox fabrics",
+"SIT": "System Integration Testing",
 "STG": "Designates staging fabrics",
 "TEST": "Designates testing fabrics",
+"TST": "Alternative Test spelling",
 "UAT": "Designates user acceptance testing fabrics"
 },
 "name": "FabricType",

@@ -170,6 +173,9 @@
 "PROD",
 "CORP",
 "RVW",
+"PRD",
+"TST",
+"SIT",
 "SANDBOX"
 ],
 "doc": "Fabric group type"

datahub/metadata/schemas/DataHubPolicyInfo.avsc

@@ -110,13 +110,15 @@
 "type": "enum",
 "symbolDocs": {
     "EQUALS": "Whether the field matches the value",
+    "NOT_EQUALS": "Whether the field does not match the value",
     "STARTS_WITH": "Whether the field value starts with the value"
 },
 "name": "PolicyMatchCondition",
 "namespace": "com.linkedin.pegasus2avro.policy",
 "symbols": [
     "EQUALS",
-    "STARTS_WITH"
+    "STARTS_WITH",
+    "NOT_EQUALS"
 ],
 "doc": "The matching condition in a filter criterion"
 },

@@ -138,6 +140,15 @@
 "name": "filter",
 "default": null,
 "doc": "Filter to apply privileges to"
+},
+{
+    "type": [
+        "null",
+        "com.linkedin.pegasus2avro.policy.PolicyMatchFilter"
+    ],
+    "name": "privilegeConstraints",
+    "default": null,
+    "doc": "Constraints around what sub-resources operations are allowed to modify, i.e. NOT_EQUALS - cannot modify a particular defined tag, EQUALS - can only modify a particular defined tag, STARTS_WITH - can only modify a tag starting with xyz"
 }
 ],
 "doc": "Information used to filter DataHub resource."

datahub/metadata/schemas/DataJobInfo.avsc

@@ -213,13 +213,16 @@
 "DEV": "Designates development fabrics",
 "EI": "Designates early-integration fabrics",
 "NON_PROD": "Designates non-production fabrics",
+"PRD": "Alternative Prod spelling",
 "PRE": "Designates pre-production fabrics",
 "PROD": "Designates production fabrics",
 "QA": "Designates quality assurance fabrics",
 "RVW": "Designates review fabrics",
 "SANDBOX": "Designates sandbox fabrics",
+"SIT": "System Integration Testing",
 "STG": "Designates staging fabrics",
 "TEST": "Designates testing fabrics",
+"TST": "Alternative Test spelling",
 "UAT": "Designates user acceptance testing fabrics"
 },
 "name": "FabricType",

@@ -236,6 +239,9 @@
 "PROD",
 "CORP",
 "RVW",
+"PRD",
+"TST",
+"SIT",
 "SANDBOX"
 ],
 "doc": "Fabric group type"

datahub/metadata/schemas/DataProcessKey.avsc

@@ -46,13 +46,16 @@
 "DEV": "Designates development fabrics",
 "EI": "Designates early-integration fabrics",
 "NON_PROD": "Designates non-production fabrics",
+"PRD": "Alternative Prod spelling",
 "PRE": "Designates pre-production fabrics",
 "PROD": "Designates production fabrics",
 "QA": "Designates quality assurance fabrics",
 "RVW": "Designates review fabrics",
 "SANDBOX": "Designates sandbox fabrics",
+"SIT": "System Integration Testing",
 "STG": "Designates staging fabrics",
 "TEST": "Designates testing fabrics",
+"TST": "Alternative Test spelling",
 "UAT": "Designates user acceptance testing fabrics"
 },
 "name": "FabricType",

@@ -69,6 +72,9 @@
 "PROD",
 "CORP",
 "RVW",
+"PRD",
+"TST",
+"SIT",
 "SANDBOX"
 ],
 "doc": "Fabric group type"

datahub/metadata/schemas/DatasetKey.avsc

@@ -83,13 +83,16 @@
 "DEV": "Designates development fabrics",
 "EI": "Designates early-integration fabrics",
 "NON_PROD": "Designates non-production fabrics",
+"PRD": "Alternative Prod spelling",
 "PRE": "Designates pre-production fabrics",
 "PROD": "Designates production fabrics",
 "QA": "Designates quality assurance fabrics",
 "RVW": "Designates review fabrics",
 "SANDBOX": "Designates sandbox fabrics",
+"SIT": "System Integration Testing",
 "STG": "Designates staging fabrics",
 "TEST": "Designates testing fabrics",
+"TST": "Alternative Test spelling",
 "UAT": "Designates user acceptance testing fabrics"
 },
 "name": "FabricType",

@@ -106,6 +109,9 @@
 "PROD",
 "CORP",
 "RVW",
+"PRD",
+"TST",
+"SIT",
 "SANDBOX"
 ],
 "doc": "Fabric group type"

datahub/metadata/schemas/IcebergWarehouseInfo.avsc

@@ -58,13 +58,16 @@
 "DEV": "Designates development fabrics",
 "EI": "Designates early-integration fabrics",
 "NON_PROD": "Designates non-production fabrics",
+"PRD": "Alternative Prod spelling",
 "PRE": "Designates pre-production fabrics",
 "PROD": "Designates production fabrics",
 "QA": "Designates quality assurance fabrics",
 "RVW": "Designates review fabrics",
 "SANDBOX": "Designates sandbox fabrics",
+"SIT": "System Integration Testing",
 "STG": "Designates staging fabrics",
 "TEST": "Designates testing fabrics",
+"TST": "Alternative Test spelling",
 "UAT": "Designates user acceptance testing fabrics"
 },
 "name": "FabricType",

@@ -81,6 +84,9 @@
 "PROD",
 "CORP",
 "RVW",
+"PRD",
+"TST",
+"SIT",
 "SANDBOX"
 ],
 "doc": "Fabric group type"