acryl-datahub 0.15.0rc15__py3-none-any.whl → 0.15.0rc17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of acryl-datahub might be problematic. Click here for more details.

Files changed (52) hide show
  1. {acryl_datahub-0.15.0rc15.dist-info → acryl_datahub-0.15.0rc17.dist-info}/METADATA +2485 -2501
  2. {acryl_datahub-0.15.0rc15.dist-info → acryl_datahub-0.15.0rc17.dist-info}/RECORD +49 -49
  3. datahub/__init__.py +1 -1
  4. datahub/api/entities/structuredproperties/structuredproperties.py +7 -5
  5. datahub/cli/cli_utils.py +2 -0
  6. datahub/cli/delete_cli.py +66 -20
  7. datahub/configuration/common.py +3 -3
  8. datahub/ingestion/api/incremental_properties_helper.py +69 -0
  9. datahub/ingestion/api/source.py +5 -1
  10. datahub/ingestion/api/source_helpers.py +3 -1
  11. datahub/ingestion/reporting/datahub_ingestion_run_summary_provider.py +2 -2
  12. datahub/ingestion/run/pipeline.py +1 -1
  13. datahub/ingestion/run/pipeline_config.py +6 -0
  14. datahub/ingestion/sink/datahub_rest.py +3 -3
  15. datahub/ingestion/source/abs/source.py +4 -0
  16. datahub/ingestion/source/gc/datahub_gc.py +5 -5
  17. datahub/ingestion/source/gc/soft_deleted_entity_cleanup.py +1 -1
  18. datahub/ingestion/source/kafka/kafka.py +18 -11
  19. datahub/ingestion/source/looker/lookml_concept_context.py +1 -2
  20. datahub/ingestion/source/looker/view_upstream.py +65 -30
  21. datahub/ingestion/source/mode.py +0 -23
  22. datahub/ingestion/source/redash.py +13 -63
  23. datahub/ingestion/source/redshift/config.py +1 -0
  24. datahub/ingestion/source/redshift/redshift.py +2 -0
  25. datahub/ingestion/source/snowflake/snowflake_config.py +4 -0
  26. datahub/ingestion/source/snowflake/snowflake_query.py +6 -2
  27. datahub/ingestion/source/snowflake/snowflake_report.py +1 -0
  28. datahub/ingestion/source/snowflake/snowflake_schema.py +12 -0
  29. datahub/ingestion/source/snowflake/snowflake_schema_gen.py +17 -2
  30. datahub/ingestion/source/snowflake/snowflake_utils.py +45 -5
  31. datahub/ingestion/source/snowflake/snowflake_v2.py +6 -0
  32. datahub/ingestion/source/state/redundant_run_skip_handler.py +1 -1
  33. datahub/ingestion/source/tableau/tableau.py +35 -16
  34. datahub/ingestion/source/tableau/tableau_common.py +0 -1
  35. datahub/ingestion/source/unity/source.py +2 -0
  36. datahub/ingestion/source/unity/usage.py +20 -11
  37. datahub/metadata/_schema_classes.py +122 -2
  38. datahub/metadata/com/linkedin/pegasus2avro/structured/__init__.py +2 -0
  39. datahub/metadata/schema.avsc +73 -1
  40. datahub/metadata/schemas/StructuredPropertyDefinition.avsc +1 -1
  41. datahub/metadata/schemas/StructuredPropertyKey.avsc +1 -0
  42. datahub/metadata/schemas/StructuredPropertySettings.avsc +114 -0
  43. datahub/sql_parsing/schema_resolver.py +23 -0
  44. datahub/sql_parsing/sqlglot_lineage.py +48 -13
  45. datahub/testing/doctest.py +12 -0
  46. datahub/utilities/partition_executor.py +1 -1
  47. datahub/utilities/sql_lineage_parser_impl.py +0 -160
  48. datahub/utilities/sql_parser.py +0 -94
  49. datahub/utilities/sql_parser_base.py +0 -21
  50. {acryl_datahub-0.15.0rc15.dist-info → acryl_datahub-0.15.0rc17.dist-info}/WHEEL +0 -0
  51. {acryl_datahub-0.15.0rc15.dist-info → acryl_datahub-0.15.0rc17.dist-info}/entry_points.txt +0 -0
  52. {acryl_datahub-0.15.0rc15.dist-info → acryl_datahub-0.15.0rc17.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  import abc
2
2
  from functools import cached_property
3
- from typing import ClassVar, Literal, Optional, Tuple
3
+ from typing import ClassVar, List, Literal, Optional, Tuple
4
4
 
5
5
  from datahub.configuration.pattern_utils import is_schema_allowed
6
6
  from datahub.emitter.mce_builder import make_dataset_urn_with_platform_instance
@@ -184,6 +184,46 @@ def _is_sys_table(table_name: str) -> bool:
184
184
  return table_name.lower().startswith("sys$")
185
185
 
186
186
 
187
+ def _split_qualified_name(qualified_name: str) -> List[str]:
188
+ """
189
+ Split a qualified name into its constituent parts.
190
+
191
+ >>> _split_qualified_name("db.my_schema.my_table")
192
+ ['db', 'my_schema', 'my_table']
193
+ >>> _split_qualified_name('"db"."my_schema"."my_table"')
194
+ ['db', 'my_schema', 'my_table']
195
+ >>> _split_qualified_name('TEST_DB.TEST_SCHEMA."TABLE.WITH.DOTS"')
196
+ ['TEST_DB', 'TEST_SCHEMA', 'TABLE.WITH.DOTS']
197
+ >>> _split_qualified_name('TEST_DB."SCHEMA.WITH.DOTS".MY_TABLE')
198
+ ['TEST_DB', 'SCHEMA.WITH.DOTS', 'MY_TABLE']
199
+ """
200
+
201
+ # Fast path - no quotes.
202
+ if '"' not in qualified_name:
203
+ return qualified_name.split(".")
204
+
205
+ # First pass - split on dots that are not inside quotes.
206
+ in_quote = False
207
+ parts: List[List[str]] = [[]]
208
+ for char in qualified_name:
209
+ if char == '"':
210
+ in_quote = not in_quote
211
+ elif char == "." and not in_quote:
212
+ parts.append([])
213
+ else:
214
+ parts[-1].append(char)
215
+
216
+ # Second pass - remove outer pairs of quotes.
217
+ result = []
218
+ for part in parts:
219
+ if len(part) > 2 and part[0] == '"' and part[-1] == '"':
220
+ part = part[1:-1]
221
+
222
+ result.append("".join(part))
223
+
224
+ return result
225
+
226
+
187
227
  # Qualified Object names from snowflake audit logs have quotes for snowflake quoted identifiers,
188
228
  # For example "test-database"."test-schema".test_table
189
229
  # whereas we generate urns without quotes even for quoted identifiers for backward compatibility
@@ -192,7 +232,7 @@ def _is_sys_table(table_name: str) -> bool:
192
232
  def _cleanup_qualified_name(
193
233
  qualified_name: str, structured_reporter: SourceReport
194
234
  ) -> str:
195
- name_parts = qualified_name.split(".")
235
+ name_parts = _split_qualified_name(qualified_name)
196
236
  if len(name_parts) != 3:
197
237
  if not _is_sys_table(qualified_name):
198
238
  structured_reporter.info(
@@ -203,9 +243,9 @@ def _cleanup_qualified_name(
203
243
  )
204
244
  return qualified_name.replace('"', "")
205
245
  return _combine_identifier_parts(
206
- db_name=name_parts[0].strip('"'),
207
- schema_name=name_parts[1].strip('"'),
208
- table_name=name_parts[2].strip('"'),
246
+ db_name=name_parts[0],
247
+ schema_name=name_parts[1],
248
+ table_name=name_parts[2],
209
249
  )
210
250
 
211
251
 
@@ -17,6 +17,9 @@ from datahub.ingestion.api.decorators import (
17
17
  support_status,
18
18
  )
19
19
  from datahub.ingestion.api.incremental_lineage_helper import auto_incremental_lineage
20
+ from datahub.ingestion.api.incremental_properties_helper import (
21
+ auto_incremental_properties,
22
+ )
20
23
  from datahub.ingestion.api.source import (
21
24
  CapabilityReport,
22
25
  MetadataWorkUnitProcessor,
@@ -446,6 +449,9 @@ class SnowflakeV2Source(
446
449
  functools.partial(
447
450
  auto_incremental_lineage, self.config.incremental_lineage
448
451
  ),
452
+ functools.partial(
453
+ auto_incremental_properties, self.config.incremental_properties
454
+ ),
449
455
  StaleEntityRemovalHandler.create(
450
456
  self, self.config, self.ctx
451
457
  ).workunit_processor,
@@ -69,7 +69,7 @@ class RedundantRunSkipHandler(
69
69
  platform: Optional[str] = None
70
70
  source_class = type(self.source)
71
71
  if hasattr(source_class, "get_platform_name"):
72
- platform = source_class.get_platform_name() # type: ignore
72
+ platform = source_class.get_platform_name()
73
73
 
74
74
  # Default name for everything else
75
75
  job_name_suffix = self.get_job_name_suffix()
@@ -353,7 +353,7 @@ class TableauConfig(
353
353
 
354
354
  project_path_separator: str = Field(
355
355
  default="/",
356
- description="The separator used for the project_pattern field between project names. By default, we use a slash. "
356
+ description="The separator used for the project_path_pattern field between project names. By default, we use a slash. "
357
357
  "You can change this if your Tableau projects contain slashes in their names, and you'd like to filter by project.",
358
358
  )
359
359
 
@@ -959,19 +959,36 @@ class TableauSiteSource:
959
959
  return is_allowed
960
960
 
961
961
  def _is_denied_project(self, project: TableauProject) -> bool:
962
- # Either project name or project path should exist in deny
963
- for deny_pattern in self.config.project_pattern.deny:
964
- # Either name or project path is denied
965
- if re.match(
966
- deny_pattern, project.name, self.config.project_pattern.regex_flags
967
- ) or re.match(
968
- deny_pattern,
969
- self._get_project_path(project),
970
- self.config.project_pattern.regex_flags,
971
- ):
972
- return True
973
- logger.info(f"project({project.name}) is not denied as per project_pattern")
974
- return False
962
+ """
963
+ Why use an explicit denial check instead of the `AllowDenyPattern.allowed` method?
964
+
965
+ Consider a scenario where a Tableau site contains four projects: A, B, C, and D, with the following hierarchical relationship:
966
+
967
+ - **A**
968
+ - **B** (Child of A)
969
+ - **C** (Child of A)
970
+ - **D**
971
+
972
+ In this setup:
973
+
974
+ - `project_pattern` is configured with `allow: ["A"]` and `deny: ["B"]`.
975
+ - `extract_project_hierarchy` is set to `True`.
976
+
977
+ The goal is to extract assets from project A and its children while explicitly denying the child project B.
978
+
979
+ If we rely solely on the `project_pattern.allowed()` method, project C's assets will not be ingested.
980
+ This happens because project C is not explicitly included in the `allow` list, nor is it part of the `deny` list.
981
+ However, since `extract_project_hierarchy` is enabled, project C should ideally be included in the ingestion process unless explicitly denied.
982
+
983
+ To address this, the function explicitly checks the deny regex to ensure that project C’s assets are ingested if it is not specifically denied in the deny list. This approach ensures that the hierarchy is respected while adhering to the configured allow/deny rules.
984
+ """
985
+
986
+ # Either project_pattern or project_path_pattern is set in a recipe
987
+ # TableauConfig.projects_backward_compatibility ensures that at least one of these properties is configured.
988
+
989
+ return self.config.project_pattern.denied(
990
+ project.name
991
+ ) or self.config.project_path_pattern.denied(self._get_project_path(project))
975
992
 
976
993
  def _init_tableau_project_registry(self, all_project_map: dict) -> None:
977
994
  list_of_skip_projects: List[TableauProject] = []
@@ -999,9 +1016,11 @@ class TableauSiteSource:
999
1016
  for project in list_of_skip_projects:
1000
1017
  if (
1001
1018
  project.parent_id in projects_to_ingest
1002
- and self._is_denied_project(project) is False
1019
+ and not self._is_denied_project(project)
1003
1020
  ):
1004
- logger.debug(f"Project {project.name} is added in project registry")
1021
+ logger.debug(
1022
+ f"Project {project.name} is added in project registry as it's a child project and not explicitly denied in `deny` list"
1023
+ )
1005
1024
  projects_to_ingest[project.id] = project
1006
1025
 
1007
1026
  # We rely on automatic browse paths (v2) when creating containers. That's why we need to sort the projects here.
@@ -979,7 +979,6 @@ def get_filter_pages(query_filter: dict, page_size: int) -> List[dict]:
979
979
  len(query_filter.keys()) == 1
980
980
  and query_filter.get(c.ID_WITH_IN)
981
981
  and isinstance(query_filter[c.ID_WITH_IN], list)
982
- and len(query_filter[c.ID_WITH_IN]) > 100 * page_size
983
982
  ):
984
983
  ids = query_filter[c.ID_WITH_IN]
985
984
  filter_pages = [
@@ -556,6 +556,8 @@ class UnityCatalogSource(StatefulIngestionSourceBase, TestableSource):
556
556
  )
557
557
 
558
558
  if table_props:
559
+ # TODO: use auto_incremental_properties workunit processor instead
560
+ # Consider enabling incremental_properties by default
559
561
  patch_builder = create_dataset_props_patch_builder(dataset_urn, table_props)
560
562
  for patch_mcp in patch_builder.build():
561
563
  yield MetadataWorkUnit(
@@ -7,7 +7,6 @@ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, Set,
7
7
 
8
8
  import pyspark
9
9
  from databricks.sdk.service.sql import QueryStatementType
10
- from sqllineage.runner import LineageRunner
11
10
 
12
11
  from datahub.emitter.mcp import MetadataChangeProposalWrapper
13
12
  from datahub.ingestion.api.source_helpers import auto_empty_dataset_usage_statistics
@@ -22,7 +21,9 @@ from datahub.ingestion.source.unity.proxy_types import (
22
21
  from datahub.ingestion.source.unity.report import UnityCatalogReport
23
22
  from datahub.ingestion.source.usage.usage_common import UsageAggregator
24
23
  from datahub.metadata.schema_classes import OperationClass
24
+ from datahub.sql_parsing.sqlglot_lineage import create_lineage_sql_parsed_result
25
25
  from datahub.sql_parsing.sqlglot_utils import get_query_fingerprint
26
+ from datahub.utilities.urns.dataset_urn import DatasetUrn
26
27
 
27
28
  logger = logging.getLogger(__name__)
28
29
 
@@ -48,6 +49,7 @@ class UnityCatalogUsageExtractor:
48
49
  proxy: UnityCatalogApiProxy
49
50
  table_urn_builder: Callable[[TableReference], str]
50
51
  user_urn_builder: Callable[[str], str]
52
+ platform: str = "databricks"
51
53
 
52
54
  def __post_init__(self):
53
55
  self.usage_aggregator = UsageAggregator[TableReference](self.config)
@@ -173,7 +175,7 @@ class UnityCatalogUsageExtractor:
173
175
  self, query: Query, table_map: TableMap
174
176
  ) -> Optional[QueryTableInfo]:
175
177
  with self.report.usage_perf_report.sql_parsing_timer:
176
- table_info = self._parse_query_via_lineage_runner(query.query_text)
178
+ table_info = self._parse_query_via_sqlglot(query.query_text)
177
179
  if table_info is None and query.statement_type == QueryStatementType.SELECT:
178
180
  with self.report.usage_perf_report.spark_sql_parsing_timer:
179
181
  table_info = self._parse_query_via_spark_sql_plan(query.query_text)
@@ -191,26 +193,33 @@ class UnityCatalogUsageExtractor:
191
193
  ),
192
194
  )
193
195
 
194
- def _parse_query_via_lineage_runner(self, query: str) -> Optional[StringTableInfo]:
196
+ def _parse_query_via_sqlglot(self, query: str) -> Optional[StringTableInfo]:
195
197
  try:
196
- runner = LineageRunner(query)
198
+ sql_parser_in_tables = create_lineage_sql_parsed_result(
199
+ query=query,
200
+ default_db=None,
201
+ platform=self.platform,
202
+ env=self.config.env,
203
+ platform_instance=None,
204
+ )
205
+
197
206
  return GenericTableInfo(
198
207
  source_tables=[
199
- self._parse_sqllineage_table(table)
200
- for table in runner.source_tables
208
+ self._parse_sqlglot_table(table)
209
+ for table in sql_parser_in_tables.in_tables
201
210
  ],
202
211
  target_tables=[
203
- self._parse_sqllineage_table(table)
204
- for table in runner.target_tables
212
+ self._parse_sqlglot_table(table)
213
+ for table in sql_parser_in_tables.out_tables
205
214
  ],
206
215
  )
207
216
  except Exception as e:
208
- logger.info(f"Could not parse query via lineage runner, {query}: {e!r}")
217
+ logger.info(f"Could not parse query via sqlglot, {query}: {e!r}")
209
218
  return None
210
219
 
211
220
  @staticmethod
212
- def _parse_sqllineage_table(sqllineage_table: object) -> str:
213
- full_table_name = str(sqllineage_table)
221
+ def _parse_sqlglot_table(table_urn: str) -> str:
222
+ full_table_name = DatasetUrn.from_string(table_urn).name
214
223
  default_schema = "<default>."
215
224
  if full_table_name.startswith(default_schema):
216
225
  return full_table_name[len(default_schema) :]
@@ -23267,7 +23267,7 @@ class StructuredPropertyDefinitionClass(_Aspect):
23267
23267
 
23268
23268
  @property
23269
23269
  def lastModified(self) -> Union[None, "AuditStampClass"]:
23270
- """Created Audit stamp"""
23270
+ """Last Modified Audit stamp"""
23271
23271
  return self._inner_dict.get('lastModified') # type: ignore
23272
23272
 
23273
23273
  @lastModified.setter
@@ -23280,7 +23280,7 @@ class StructuredPropertyKeyClass(_Aspect):
23280
23280
 
23281
23281
 
23282
23282
  ASPECT_NAME = 'structuredPropertyKey'
23283
- ASPECT_INFO = {'keyForEntity': 'structuredProperty', 'entityCategory': 'core', 'entityAspects': ['propertyDefinition', 'institutionalMemory', 'status'], 'entityDoc': 'Structured Property represents a property meant for extending the core model of a logical entity'}
23283
+ ASPECT_INFO = {'keyForEntity': 'structuredProperty', 'entityCategory': 'core', 'entityAspects': ['propertyDefinition', 'structuredPropertySettings', 'institutionalMemory', 'status'], 'entityDoc': 'Structured Property represents a property meant for extending the core model of a logical entity'}
23284
23284
  RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.structured.StructuredPropertyKey")
23285
23285
 
23286
23286
  def __init__(self,
@@ -23304,6 +23304,122 @@ class StructuredPropertyKeyClass(_Aspect):
23304
23304
  self._inner_dict['id'] = value
23305
23305
 
23306
23306
 
23307
# NOTE(review): this aspect class mirrors the StructuredPropertySettings record
# added to schema.avsc in the same release and follows the same template as the
# sibling aspect classes in _schema_classes.py — presumably code-generated from
# the avro schemas; keep any changes in the generator, not by hand. TODO confirm.
class StructuredPropertySettingsClass(_Aspect):
    """Settings specific to a structured property entity"""


    ASPECT_NAME = 'structuredPropertySettings'
    ASPECT_INFO = {}
    RECORD_SCHEMA = get_schema_type("com.linkedin.pegasus2avro.structured.StructuredPropertySettings")

    def __init__(self,
        isHidden: Optional[bool]=None,
        showInSearchFilters: Optional[bool]=None,
        showInAssetSummary: Optional[bool]=None,
        showAsAssetBadge: Optional[bool]=None,
        showInColumnsTable: Optional[bool]=None,
        lastModified: Union[None, "AuditStampClass"]=None,
    ):
        super().__init__()

        # Each boolean falls back to the avro-schema default (False) when the
        # caller passes None; lastModified has no schema default and is assigned
        # as-is.
        if isHidden is None:
            # default: False
            self.isHidden = self.RECORD_SCHEMA.fields_dict["isHidden"].default
        else:
            self.isHidden = isHidden
        if showInSearchFilters is None:
            # default: False
            self.showInSearchFilters = self.RECORD_SCHEMA.fields_dict["showInSearchFilters"].default
        else:
            self.showInSearchFilters = showInSearchFilters
        if showInAssetSummary is None:
            # default: False
            self.showInAssetSummary = self.RECORD_SCHEMA.fields_dict["showInAssetSummary"].default
        else:
            self.showInAssetSummary = showInAssetSummary
        if showAsAssetBadge is None:
            # default: False
            self.showAsAssetBadge = self.RECORD_SCHEMA.fields_dict["showAsAssetBadge"].default
        else:
            self.showAsAssetBadge = showAsAssetBadge
        if showInColumnsTable is None:
            # default: False
            self.showInColumnsTable = self.RECORD_SCHEMA.fields_dict["showInColumnsTable"].default
        else:
            self.showInColumnsTable = showInColumnsTable
        self.lastModified = lastModified

    # Reset every field to its avro-schema default value.
    def _restore_defaults(self) -> None:
        self.isHidden = self.RECORD_SCHEMA.fields_dict["isHidden"].default
        self.showInSearchFilters = self.RECORD_SCHEMA.fields_dict["showInSearchFilters"].default
        self.showInAssetSummary = self.RECORD_SCHEMA.fields_dict["showInAssetSummary"].default
        self.showAsAssetBadge = self.RECORD_SCHEMA.fields_dict["showAsAssetBadge"].default
        self.showInColumnsTable = self.RECORD_SCHEMA.fields_dict["showInColumnsTable"].default
        self.lastModified = self.RECORD_SCHEMA.fields_dict["lastModified"].default


    @property
    def isHidden(self) -> bool:
        """Whether or not this asset should be hidden in the main application"""
        return self._inner_dict.get('isHidden')  # type: ignore

    @isHidden.setter
    def isHidden(self, value: bool) -> None:
        self._inner_dict['isHidden'] = value


    @property
    def showInSearchFilters(self) -> bool:
        """Whether or not this asset should be displayed as a search filter"""
        return self._inner_dict.get('showInSearchFilters')  # type: ignore

    @showInSearchFilters.setter
    def showInSearchFilters(self, value: bool) -> None:
        self._inner_dict['showInSearchFilters'] = value


    @property
    def showInAssetSummary(self) -> bool:
        """Whether or not this asset should be displayed in the asset sidebar"""
        return self._inner_dict.get('showInAssetSummary')  # type: ignore

    @showInAssetSummary.setter
    def showInAssetSummary(self, value: bool) -> None:
        self._inner_dict['showInAssetSummary'] = value


    @property
    def showAsAssetBadge(self) -> bool:
        """Whether or not this asset should be displayed as an asset badge on other
        asset's headers"""
        return self._inner_dict.get('showAsAssetBadge')  # type: ignore

    @showAsAssetBadge.setter
    def showAsAssetBadge(self, value: bool) -> None:
        self._inner_dict['showAsAssetBadge'] = value


    @property
    def showInColumnsTable(self) -> bool:
        """Whether or not this asset should be displayed as a column in the schema field table
        in a Dataset's "Columns" tab."""
        return self._inner_dict.get('showInColumnsTable')  # type: ignore

    @showInColumnsTable.setter
    def showInColumnsTable(self, value: bool) -> None:
        self._inner_dict['showInColumnsTable'] = value


    @property
    def lastModified(self) -> Union[None, "AuditStampClass"]:
        """Last Modified Audit stamp"""
        return self._inner_dict.get('lastModified')  # type: ignore

    @lastModified.setter
    def lastModified(self, value: Union[None, "AuditStampClass"]) -> None:
        self._inner_dict['lastModified'] = value
23422
+
23307
23423
  class StructuredPropertyValueAssignmentClass(DictWrapper):
23308
23424
  # No docs available.
23309
23425
 
@@ -24775,6 +24891,7 @@ __SCHEMA_TYPES = {
24775
24891
  'com.linkedin.pegasus2avro.structured.StructuredProperties': StructuredPropertiesClass,
24776
24892
  'com.linkedin.pegasus2avro.structured.StructuredPropertyDefinition': StructuredPropertyDefinitionClass,
24777
24893
  'com.linkedin.pegasus2avro.structured.StructuredPropertyKey': StructuredPropertyKeyClass,
24894
+ 'com.linkedin.pegasus2avro.structured.StructuredPropertySettings': StructuredPropertySettingsClass,
24778
24895
  'com.linkedin.pegasus2avro.structured.StructuredPropertyValueAssignment': StructuredPropertyValueAssignmentClass,
24779
24896
  'com.linkedin.pegasus2avro.tag.TagProperties': TagPropertiesClass,
24780
24897
  'com.linkedin.pegasus2avro.telemetry.TelemetryClientId': TelemetryClientIdClass,
@@ -25240,6 +25357,7 @@ __SCHEMA_TYPES = {
25240
25357
  'StructuredProperties': StructuredPropertiesClass,
25241
25358
  'StructuredPropertyDefinition': StructuredPropertyDefinitionClass,
25242
25359
  'StructuredPropertyKey': StructuredPropertyKeyClass,
25360
+ 'StructuredPropertySettings': StructuredPropertySettingsClass,
25243
25361
  'StructuredPropertyValueAssignment': StructuredPropertyValueAssignmentClass,
25244
25362
  'TagProperties': TagPropertiesClass,
25245
25363
  'TelemetryClientId': TelemetryClientIdClass,
@@ -25336,6 +25454,7 @@ ASPECT_CLASSES: List[Type[_Aspect]] = [
25336
25454
  QuerySubjectsClass,
25337
25455
  StructuredPropertyKeyClass,
25338
25456
  StructuredPropertyDefinitionClass,
25457
+ StructuredPropertySettingsClass,
25339
25458
  StructuredPropertiesClass,
25340
25459
  GlobalSettingsInfoClass,
25341
25460
  DataHubRetentionConfigClass,
@@ -25548,6 +25667,7 @@ class AspectBag(TypedDict, total=False):
25548
25667
  querySubjects: QuerySubjectsClass
25549
25668
  structuredPropertyKey: StructuredPropertyKeyClass
25550
25669
  propertyDefinition: StructuredPropertyDefinitionClass
25670
+ structuredPropertySettings: StructuredPropertySettingsClass
25551
25671
  structuredProperties: StructuredPropertiesClass
25552
25672
  globalSettingsInfo: GlobalSettingsInfoClass
25553
25673
  dataHubRetentionConfig: DataHubRetentionConfigClass
@@ -12,6 +12,7 @@ from .....schema_classes import PropertyValueClass
12
12
  from .....schema_classes import StructuredPropertiesClass
13
13
  from .....schema_classes import StructuredPropertyDefinitionClass
14
14
  from .....schema_classes import StructuredPropertyKeyClass
15
+ from .....schema_classes import StructuredPropertySettingsClass
15
16
  from .....schema_classes import StructuredPropertyValueAssignmentClass
16
17
 
17
18
 
@@ -20,6 +21,7 @@ PropertyValue = PropertyValueClass
20
21
  StructuredProperties = StructuredPropertiesClass
21
22
  StructuredPropertyDefinition = StructuredPropertyDefinitionClass
22
23
  StructuredPropertyKey = StructuredPropertyKeyClass
24
+ StructuredPropertySettings = StructuredPropertySettingsClass
23
25
  StructuredPropertyValueAssignment = StructuredPropertyValueAssignmentClass
24
26
 
25
27
  # fmt: on
@@ -2995,6 +2995,7 @@
2995
2995
  "entityCategory": "core",
2996
2996
  "entityAspects": [
2997
2997
  "propertyDefinition",
2998
+ "structuredPropertySettings",
2998
2999
  "institutionalMemory",
2999
3000
  "status"
3000
3001
  ],
@@ -3328,10 +3329,81 @@
3328
3329
  ],
3329
3330
  "name": "lastModified",
3330
3331
  "default": null,
3331
- "doc": "Created Audit stamp"
3332
+ "doc": "Last Modified Audit stamp"
3332
3333
  }
3333
3334
  ]
3334
3335
  },
3336
+ {
3337
+ "type": "record",
3338
+ "Aspect": {
3339
+ "name": "structuredPropertySettings"
3340
+ },
3341
+ "name": "StructuredPropertySettings",
3342
+ "namespace": "com.linkedin.pegasus2avro.structured",
3343
+ "fields": [
3344
+ {
3345
+ "Searchable": {
3346
+ "fieldType": "BOOLEAN"
3347
+ },
3348
+ "type": "boolean",
3349
+ "name": "isHidden",
3350
+ "default": false,
3351
+ "doc": "Whether or not this asset should be hidden in the main application"
3352
+ },
3353
+ {
3354
+ "Searchable": {
3355
+ "fieldType": "BOOLEAN"
3356
+ },
3357
+ "type": "boolean",
3358
+ "name": "showInSearchFilters",
3359
+ "default": false,
3360
+ "doc": "Whether or not this asset should be displayed as a search filter"
3361
+ },
3362
+ {
3363
+ "Searchable": {
3364
+ "fieldType": "BOOLEAN"
3365
+ },
3366
+ "type": "boolean",
3367
+ "name": "showInAssetSummary",
3368
+ "default": false,
3369
+ "doc": "Whether or not this asset should be displayed in the asset sidebar"
3370
+ },
3371
+ {
3372
+ "Searchable": {
3373
+ "fieldType": "BOOLEAN"
3374
+ },
3375
+ "type": "boolean",
3376
+ "name": "showAsAssetBadge",
3377
+ "default": false,
3378
+ "doc": "Whether or not this asset should be displayed as an asset badge on other\nasset's headers"
3379
+ },
3380
+ {
3381
+ "Searchable": {
3382
+ "fieldType": "BOOLEAN"
3383
+ },
3384
+ "type": "boolean",
3385
+ "name": "showInColumnsTable",
3386
+ "default": false,
3387
+ "doc": "Whether or not this asset should be displayed as a column in the schema field table\nin a Dataset's \"Columns\" tab."
3388
+ },
3389
+ {
3390
+ "Searchable": {
3391
+ "/time": {
3392
+ "fieldName": "lastModifiedSettings",
3393
+ "fieldType": "DATETIME"
3394
+ }
3395
+ },
3396
+ "type": [
3397
+ "null",
3398
+ "com.linkedin.pegasus2avro.common.AuditStamp"
3399
+ ],
3400
+ "name": "lastModified",
3401
+ "default": null,
3402
+ "doc": "Last Modified Audit stamp"
3403
+ }
3404
+ ],
3405
+ "doc": "Settings specific to a structured property entity"
3406
+ },
3335
3407
  {
3336
3408
  "type": "record",
3337
3409
  "Aspect": {
@@ -359,7 +359,7 @@
359
359
  ],
360
360
  "name": "lastModified",
361
361
  "default": null,
362
- "doc": "Created Audit stamp"
362
+ "doc": "Last Modified Audit stamp"
363
363
  }
364
364
  ]
365
365
  }
@@ -6,6 +6,7 @@
6
6
  "entityCategory": "core",
7
7
  "entityAspects": [
8
8
  "propertyDefinition",
9
+ "structuredPropertySettings",
9
10
  "institutionalMemory",
10
11
  "status"
11
12
  ],