acryl-datahub 1.2.0.2rc2__py3-none-any.whl → 1.2.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of acryl-datahub might be problematic; see the registry's advisory page for more details.

Files changed (51):
  1. {acryl_datahub-1.2.0.2rc2.dist-info → acryl_datahub-1.2.0.3.dist-info}/METADATA +2511 -2509
  2. {acryl_datahub-1.2.0.2rc2.dist-info → acryl_datahub-1.2.0.3.dist-info}/RECORD +51 -43
  3. datahub/_version.py +1 -1
  4. datahub/api/entities/dataset/dataset.py +13 -1
  5. datahub/emitter/rest_emitter.py +18 -5
  6. datahub/ingestion/autogenerated/capability_summary.json +97 -6
  7. datahub/ingestion/graph/client.py +19 -3
  8. datahub/ingestion/sink/datahub_rest.py +2 -0
  9. datahub/ingestion/source/aws/glue.py +8 -0
  10. datahub/ingestion/source/cassandra/cassandra.py +5 -7
  11. datahub/ingestion/source/common/subtypes.py +2 -0
  12. datahub/ingestion/source/datahub/datahub_source.py +3 -0
  13. datahub/ingestion/source/dbt/dbt_common.py +10 -0
  14. datahub/ingestion/source/delta_lake/source.py +1 -0
  15. datahub/ingestion/source/grafana/entity_mcp_builder.py +272 -0
  16. datahub/ingestion/source/grafana/field_utils.py +307 -0
  17. datahub/ingestion/source/grafana/grafana_api.py +142 -0
  18. datahub/ingestion/source/grafana/grafana_config.py +104 -0
  19. datahub/ingestion/source/grafana/grafana_source.py +522 -84
  20. datahub/ingestion/source/grafana/lineage.py +202 -0
  21. datahub/ingestion/source/grafana/models.py +120 -0
  22. datahub/ingestion/source/grafana/report.py +91 -0
  23. datahub/ingestion/source/grafana/types.py +16 -0
  24. datahub/ingestion/source/hex/hex.py +8 -0
  25. datahub/ingestion/source/hex/query_fetcher.py +1 -1
  26. datahub/ingestion/source/looker/looker_source.py +9 -0
  27. datahub/ingestion/source/looker/lookml_source.py +8 -0
  28. datahub/ingestion/source/mongodb.py +11 -1
  29. datahub/ingestion/source/redshift/redshift.py +8 -1
  30. datahub/ingestion/source/s3/source.py +9 -1
  31. datahub/ingestion/source/sql/athena.py +8 -2
  32. datahub/ingestion/source/sql/athena_properties_extractor.py +2 -2
  33. datahub/ingestion/source/sql/clickhouse.py +9 -0
  34. datahub/ingestion/source/sql/vertica.py +3 -0
  35. datahub/ingestion/source/sql_queries.py +88 -46
  36. datahub/ingestion/source/unity/proxy.py +112 -22
  37. datahub/ingestion/source/unity/source.py +7 -10
  38. datahub/metadata/_internal_schema_classes.py +18 -3
  39. datahub/metadata/schema.avsc +19 -1
  40. datahub/metadata/schemas/DataHubPageModuleProperties.avsc +10 -1
  41. datahub/metadata/schemas/DataJobInputOutput.avsc +8 -0
  42. datahub/metadata/schemas/MetadataChangeEvent.avsc +9 -0
  43. datahub/metadata/schemas/UpstreamLineage.avsc +9 -0
  44. datahub/sdk/dataset.py +44 -0
  45. datahub/sdk/search_filters.py +34 -14
  46. datahub/sql_parsing/sql_parsing_aggregator.py +5 -0
  47. datahub/telemetry/telemetry.py +4 -1
  48. {acryl_datahub-1.2.0.2rc2.dist-info → acryl_datahub-1.2.0.3.dist-info}/WHEEL +0 -0
  49. {acryl_datahub-1.2.0.2rc2.dist-info → acryl_datahub-1.2.0.3.dist-info}/entry_points.txt +0 -0
  50. {acryl_datahub-1.2.0.2rc2.dist-info → acryl_datahub-1.2.0.3.dist-info}/licenses/LICENSE +0 -0
  51. {acryl_datahub-1.2.0.2rc2.dist-info → acryl_datahub-1.2.0.3.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,5 @@
1
1
  {
2
- "generated_at": "2025-07-14T09:20:09.632850+00:00",
2
+ "generated_at": "2025-07-24T13:24:05.751563+00:00",
3
3
  "generated_by": "metadata-ingestion/scripts/capability_summary.py",
4
4
  "plugin_details": {
5
5
  "abs": {
@@ -49,6 +49,7 @@
49
49
  "capability": "LINEAGE_FINE",
50
50
  "description": "Supported for S3 tables",
51
51
  "subtype_modifier": [
52
+ "View",
52
53
  "Table"
53
54
  ],
54
55
  "supported": true
@@ -95,6 +96,7 @@
95
96
  "capability": "LINEAGE_COARSE",
96
97
  "description": "Supported for S3 tables",
97
98
  "subtype_modifier": [
99
+ "View",
98
100
  "Table"
99
101
  ],
100
102
  "supported": true
@@ -305,7 +307,8 @@
305
307
  "capability": "LINEAGE_COARSE",
306
308
  "description": "Enabled by default to get lineage for views via `include_view_lineage`",
307
309
  "subtype_modifier": [
308
- "View"
310
+ "View",
311
+ "Table"
309
312
  ],
310
313
  "supported": true
311
314
  },
@@ -462,6 +465,12 @@
462
465
  },
463
466
  "datahub": {
464
467
  "capabilities": [
468
+ {
469
+ "capability": "CONTAINERS",
470
+ "description": "Enabled by default",
471
+ "subtype_modifier": null,
472
+ "supported": true
473
+ },
465
474
  {
466
475
  "capability": "DELETION_DETECTION",
467
476
  "description": "Enabled by default via stateful ingestion",
@@ -595,6 +604,12 @@
595
604
  },
596
605
  "delta-lake": {
597
606
  "capabilities": [
607
+ {
608
+ "capability": "CONTAINERS",
609
+ "description": "Enabled by default",
610
+ "subtype_modifier": null,
611
+ "supported": true
612
+ },
598
613
  {
599
614
  "capability": "DELETION_DETECTION",
600
615
  "description": "Enabled by default via stateful ingestion",
@@ -918,6 +933,14 @@
918
933
  },
919
934
  "glue": {
920
935
  "capabilities": [
936
+ {
937
+ "capability": "CONTAINERS",
938
+ "description": "Enabled by default",
939
+ "subtype_modifier": [
940
+ "Database"
941
+ ],
942
+ "supported": true
943
+ },
921
944
  {
922
945
  "capability": "LINEAGE_FINE",
923
946
  "description": "Support via the `emit_s3_lineage` config field",
@@ -956,9 +979,39 @@
956
979
  },
957
980
  "grafana": {
958
981
  "capabilities": [
982
+ {
983
+ "capability": "LINEAGE_FINE",
984
+ "description": "Enabled by default",
985
+ "subtype_modifier": null,
986
+ "supported": true
987
+ },
959
988
  {
960
989
  "capability": "DELETION_DETECTION",
961
- "description": "Enabled by default via stateful ingestion",
990
+ "description": "Enabled by default",
991
+ "subtype_modifier": null,
992
+ "supported": true
993
+ },
994
+ {
995
+ "capability": "OWNERSHIP",
996
+ "description": "Enabled by default",
997
+ "subtype_modifier": null,
998
+ "supported": true
999
+ },
1000
+ {
1001
+ "capability": "TAGS",
1002
+ "description": "Enabled by default",
1003
+ "subtype_modifier": null,
1004
+ "supported": true
1005
+ },
1006
+ {
1007
+ "capability": "PLATFORM_INSTANCE",
1008
+ "description": "Enabled by default",
1009
+ "subtype_modifier": null,
1010
+ "supported": true
1011
+ },
1012
+ {
1013
+ "capability": "LINEAGE_COARSE",
1014
+ "description": "Enabled by default",
962
1015
  "subtype_modifier": null,
963
1016
  "supported": true
964
1017
  }
@@ -966,7 +1019,7 @@
966
1019
  "classname": "datahub.ingestion.source.grafana.grafana_source.GrafanaSource",
967
1020
  "platform_id": "grafana",
968
1021
  "platform_name": "Grafana",
969
- "support_status": "TESTING"
1022
+ "support_status": "CERTIFIED"
970
1023
  },
971
1024
  "hana": {
972
1025
  "capabilities": [
@@ -1057,6 +1110,14 @@
1057
1110
  "subtype_modifier": null,
1058
1111
  "supported": true
1059
1112
  },
1113
+ {
1114
+ "capability": "USAGE_STATS",
1115
+ "description": "Supported by default",
1116
+ "subtype_modifier": [
1117
+ "Project"
1118
+ ],
1119
+ "supported": true
1120
+ },
1060
1121
  {
1061
1122
  "capability": "DESCRIPTIONS",
1062
1123
  "description": "Supported by default",
@@ -1433,6 +1494,15 @@
1433
1494
  },
1434
1495
  "looker": {
1435
1496
  "capabilities": [
1497
+ {
1498
+ "capability": "CONTAINERS",
1499
+ "description": "Enabled by default",
1500
+ "subtype_modifier": [
1501
+ "LookML Model",
1502
+ "Folder"
1503
+ ],
1504
+ "supported": true
1505
+ },
1436
1506
  {
1437
1507
  "capability": "LINEAGE_FINE",
1438
1508
  "description": "Enabled by default, configured using `extract_column_level_lineage`",
@@ -1489,6 +1559,14 @@
1489
1559
  },
1490
1560
  "lookml": {
1491
1561
  "capabilities": [
1562
+ {
1563
+ "capability": "CONTAINERS",
1564
+ "description": "Enabled by default",
1565
+ "subtype_modifier": [
1566
+ "LookML Project"
1567
+ ],
1568
+ "supported": true
1569
+ },
1492
1570
  {
1493
1571
  "capability": "LINEAGE_FINE",
1494
1572
  "description": "Enabled by default, configured using `extract_column_level_lineage`",
@@ -1712,6 +1790,14 @@
1712
1790
  },
1713
1791
  "mongodb": {
1714
1792
  "capabilities": [
1793
+ {
1794
+ "capability": "CONTAINERS",
1795
+ "description": "Enabled by default",
1796
+ "subtype_modifier": [
1797
+ "Database"
1798
+ ],
1799
+ "supported": true
1800
+ },
1715
1801
  {
1716
1802
  "capability": "DELETION_DETECTION",
1717
1803
  "description": "Enabled by default via stateful ingestion",
@@ -2511,7 +2597,9 @@
2511
2597
  {
2512
2598
  "capability": "CONTAINERS",
2513
2599
  "description": "Enabled by default",
2514
- "subtype_modifier": null,
2600
+ "subtype_modifier": [
2601
+ "Database"
2602
+ ],
2515
2603
  "supported": true
2516
2604
  },
2517
2605
  {
@@ -2591,7 +2679,10 @@
2591
2679
  {
2592
2680
  "capability": "CONTAINERS",
2593
2681
  "description": "Enabled by default",
2594
- "subtype_modifier": null,
2682
+ "subtype_modifier": [
2683
+ "Folder",
2684
+ "S3 bucket"
2685
+ ],
2595
2686
  "supported": true
2596
2687
  },
2597
2688
  {
@@ -76,7 +76,15 @@ from datahub.metadata.schema_classes import (
76
76
  SystemMetadataClass,
77
77
  TelemetryClientIdClass,
78
78
  )
79
- from datahub.metadata.urns import CorpUserUrn, Urn
79
+ from datahub.metadata.urns import (
80
+ CorpUserUrn,
81
+ MlFeatureTableUrn,
82
+ MlFeatureUrn,
83
+ MlModelGroupUrn,
84
+ MlModelUrn,
85
+ MlPrimaryKeyUrn,
86
+ Urn,
87
+ )
80
88
  from datahub.telemetry.telemetry import telemetry_instance
81
89
  from datahub.utilities.perf_timer import PerfTimer
82
90
  from datahub.utilities.str_enum import StrEnum
@@ -118,8 +126,16 @@ def entity_type_to_graphql(entity_type: str) -> str:
118
126
  """Convert the entity types into GraphQL "EntityType" enum values."""
119
127
 
120
128
  # Hard-coded special cases.
121
- if entity_type == CorpUserUrn.ENTITY_TYPE:
122
- return "CORP_USER"
129
+ special_cases = {
130
+ CorpUserUrn.ENTITY_TYPE: "CORP_USER",
131
+ MlModelUrn.ENTITY_TYPE: "MLMODEL",
132
+ MlModelGroupUrn.ENTITY_TYPE: "MLMODEL_GROUP",
133
+ MlFeatureTableUrn.ENTITY_TYPE: "MLFEATURE_TABLE",
134
+ MlFeatureUrn.ENTITY_TYPE: "MLFEATURE",
135
+ MlPrimaryKeyUrn.ENTITY_TYPE: "MLPRIMARY_KEY",
136
+ }
137
+ if entity_type in special_cases:
138
+ return special_cases[entity_type]
123
139
 
124
140
  # Convert camelCase to UPPER_UNDERSCORE.
125
141
  entity_type = (
@@ -92,6 +92,7 @@ class DatahubRestSinkConfig(DatahubClientConfig):
92
92
  @dataclasses.dataclass
93
93
  class DataHubRestSinkReport(SinkReport):
94
94
  mode: Optional[RestSinkMode] = None
95
+ endpoint: Optional[RestSinkEndpoint] = None
95
96
  max_threads: Optional[int] = None
96
97
  gms_version: Optional[str] = None
97
98
  pending_requests: int = 0
@@ -142,6 +143,7 @@ class DatahubRestSink(Sink[DatahubRestSinkConfig, DataHubRestSinkReport]):
142
143
 
143
144
  self.report.gms_version = gms_config.service_version
144
145
  self.report.mode = self.config.mode
146
+ self.report.endpoint = self.config.endpoint
145
147
  self.report.max_threads = self.config.max_threads
146
148
  logger.debug("Setting env variables to override config")
147
149
  logger.debug("Setting gms config")
@@ -75,6 +75,7 @@ from datahub.ingestion.source.aws.tag_entities import (
75
75
  from datahub.ingestion.source.common.subtypes import (
76
76
  DatasetContainerSubTypes,
77
77
  DatasetSubTypes,
78
+ SourceCapabilityModifier,
78
79
  )
79
80
  from datahub.ingestion.source.glue_profiling_config import GlueProfilingConfig
80
81
  from datahub.ingestion.source.state.stale_entity_removal_handler import (
@@ -275,6 +276,13 @@ class GlueSourceReport(StaleEntityRemovalSourceReport):
275
276
  @capability(
276
277
  SourceCapability.LINEAGE_FINE, "Support via the `emit_s3_lineage` config field"
277
278
  )
279
+ @capability(
280
+ SourceCapability.CONTAINERS,
281
+ "Enabled by default",
282
+ subtype_modifier=[
283
+ SourceCapabilityModifier.DATABASE,
284
+ ],
285
+ )
278
286
  class GlueSource(StatefulIngestionSourceBase):
279
287
  """
280
288
  Note: if you also have files in S3 that you'd like to ingest, we recommend you use Glue's built-in data catalog. See [here](../../../../docs/generated/ingestion/sources/s3.md) for a quick guide on how to set up a crawler on Glue and ingest the outputs with DataHub.
@@ -296,13 +296,11 @@ class CassandraSource(StatefulIngestionSourceBase):
296
296
  qualified_name=dataset_name,
297
297
  description=view.comment,
298
298
  custom_properties=self._get_dataset_custom_props(view),
299
- extra_aspects=[
300
- ViewPropertiesClass(
301
- materialized=True,
302
- viewLogic=view.where_clause, # Use the WHERE clause as view logic
303
- viewLanguage="CQL", # Use "CQL" as the language
304
- ),
305
- ],
299
+ view_definition=ViewPropertiesClass(
300
+ materialized=True,
301
+ viewLogic=view.where_clause, # Use the WHERE clause as view logic
302
+ viewLanguage="CQL", # Use "CQL" as the language
303
+ ),
306
304
  )
307
305
 
308
306
  # Construct and emit lineage off of 'base_table_name'
@@ -69,6 +69,8 @@ class BIContainerSubTypes(StrEnum):
69
69
  SIGMA_WORKSPACE = "Sigma Workspace"
70
70
  SIGMA_WORKBOOK = "Sigma Workbook"
71
71
  MODE_COLLECTION = "Collection"
72
+ GRAFANA_FOLDER = "Folder"
73
+ GRAFANA_DASHBOARD = "Dashboard"
72
74
 
73
75
 
74
76
  class FlowContainerSubTypes(StrEnum):
@@ -6,7 +6,9 @@ from typing import Dict, Iterable, List, Optional
6
6
  from datahub.emitter.mcp import MetadataChangeProposalWrapper
7
7
  from datahub.ingestion.api.common import PipelineContext
8
8
  from datahub.ingestion.api.decorators import (
9
+ SourceCapability,
9
10
  SupportStatus,
11
+ capability,
10
12
  config_class,
11
13
  platform_name,
12
14
  support_status,
@@ -37,6 +39,7 @@ logger = logging.getLogger(__name__)
37
39
  @platform_name("DataHub")
38
40
  @config_class(DataHubSourceConfig)
39
41
  @support_status(SupportStatus.TESTING)
42
+ @capability(SourceCapability.CONTAINERS, "Enabled by default")
40
43
  class DataHubSource(StatefulIngestionSourceBase):
41
44
  platform: str = "datahub"
42
45
 
@@ -120,6 +120,7 @@ logger = logging.getLogger(__name__)
120
120
  DBT_PLATFORM = "dbt"
121
121
 
122
122
  _DEFAULT_ACTOR = mce_builder.make_user_urn("unknown")
123
+ _DBT_MAX_COMPILED_CODE_LENGTH = 1 * 1024 * 1024 # 1MB
123
124
 
124
125
 
125
126
  @dataclass
@@ -1684,6 +1685,12 @@ class DBTSourceBase(StatefulIngestionSourceBase):
1684
1685
  def get_external_url(self, node: DBTNode) -> Optional[str]:
1685
1686
  pass
1686
1687
 
1688
+ @staticmethod
1689
+ def _truncate_code(code: str, max_length: int) -> str:
1690
+ if len(code) > max_length:
1691
+ return code[:max_length] + "..."
1692
+ return code
1693
+
1687
1694
  def _create_view_properties_aspect(
1688
1695
  self, node: DBTNode
1689
1696
  ) -> Optional[ViewPropertiesClass]:
@@ -1695,6 +1702,9 @@ class DBTSourceBase(StatefulIngestionSourceBase):
1695
1702
  compiled_code = try_format_query(
1696
1703
  node.compiled_code, platform=self.config.target_platform
1697
1704
  )
1705
+ compiled_code = self._truncate_code(
1706
+ compiled_code, _DBT_MAX_COMPILED_CODE_LENGTH
1707
+ )
1698
1708
 
1699
1709
  materialized = node.materialization in {"table", "incremental", "snapshot"}
1700
1710
  view_properties = ViewPropertiesClass(
@@ -85,6 +85,7 @@ OPERATION_STATEMENT_TYPES = {
85
85
  @config_class(DeltaLakeSourceConfig)
86
86
  @support_status(SupportStatus.INCUBATING)
87
87
  @capability(SourceCapability.TAGS, "Can extract S3 object/bucket tags if enabled")
88
+ @capability(SourceCapability.CONTAINERS, "Enabled by default")
88
89
  class DeltaLakeSource(StatefulIngestionSourceBase):
89
90
  """
90
91
  This plugin extracts:
@@ -0,0 +1,272 @@
1
+ from typing import Dict, List, Optional, Tuple
2
+
3
+ from datahub.emitter.mce_builder import (
4
+ make_chart_urn,
5
+ make_dashboard_urn,
6
+ make_data_platform_urn,
7
+ make_dataplatform_instance_urn,
8
+ make_dataset_urn_with_platform_instance,
9
+ make_tag_urn,
10
+ make_user_urn,
11
+ )
12
+ from datahub.emitter.mcp import MetadataChangeProposalWrapper
13
+ from datahub.ingestion.source.grafana.models import Dashboard, Panel
14
+ from datahub.ingestion.source.grafana.types import CHART_TYPE_MAPPINGS
15
+ from datahub.metadata.schema_classes import (
16
+ ChangeAuditStampsClass,
17
+ ChartInfoClass,
18
+ DashboardInfoClass,
19
+ DataPlatformInstanceClass,
20
+ GlobalTagsClass,
21
+ OwnerClass,
22
+ OwnershipClass,
23
+ OwnershipTypeClass,
24
+ StatusClass,
25
+ TagAssociationClass,
26
+ )
27
+
28
+
29
def build_chart_mcps(
    panel: Panel,
    dashboard: Dashboard,
    platform: str,
    platform_instance: Optional[str],
    env: str,
    base_url: str,
    ingest_tags: bool,
) -> Tuple[Optional[str], str, List[MetadataChangeProposalWrapper]]:
    """Describe a Grafana panel as a DataHub chart.

    Returns a tuple of (upstream dataset urn or None, chart urn, MCP list).
    The MCPs cover platform instance, status, chart info and — when
    ``ingest_tags`` is set and the dashboard has tags — global tags.
    """
    proposals: List[MetadataChangeProposalWrapper] = []
    dataset_urn: Optional[str] = None

    chart_urn = make_chart_urn(
        platform,
        f"{dashboard.uid}.{panel.id}",
        platform_instance,
    )

    # dataPlatformInstance aspect (instance urn only when configured)
    instance_urn = (
        make_dataplatform_instance_urn(
            platform=platform,
            instance=platform_instance,
        )
        if platform_instance
        else None
    )
    proposals.append(
        MetadataChangeProposalWrapper(
            entityUrn=chart_urn,
            aspect=DataPlatformInstanceClass(
                platform=make_data_platform_urn(platform),
                instance=instance_urn,
            ),
        )
    )

    # status aspect — mark the chart as not soft-deleted
    proposals.append(
        MetadataChangeProposalWrapper(
            entityUrn=chart_urn,
            aspect=StatusClass(removed=False),
        )
    )

    # Upstream dataset derived from the panel's datasource reference
    inputs: List[str] = []
    if panel.datasource_ref:
        source_type = panel.datasource_ref.type or "unknown"
        source_uid = panel.datasource_ref.uid or "unknown"
        dataset_urn = make_dataset_urn_with_platform_instance(
            platform=platform,
            name=f"{source_type}.{source_uid}.{panel.id}",
            platform_instance=platform_instance,
            env=env,
        )
        inputs.append(dataset_urn)

    # chartInfo aspect
    proposals.append(
        MetadataChangeProposalWrapper(
            entityUrn=chart_urn,
            aspect=ChartInfoClass(
                type=CHART_TYPE_MAPPINGS.get(panel.type) if panel.type else None,
                description=panel.description,
                title=panel.title or f"Panel {panel.id}",
                lastModified=ChangeAuditStampsClass(),
                chartUrl=f"{base_url}/d/{dashboard.uid}?viewPanel={panel.id}",
                customProperties=_build_custom_properties(panel),
                inputs=inputs,
            ),
        )
    )

    # globalTags aspect — a "key:value" tag becomes a "key.value" tag urn
    if dashboard.tags and ingest_tags:
        associations = []
        for raw_tag in dashboard.tags:
            if ":" in raw_tag:
                key, value = raw_tag.split(":", 1)
                tag_urn = make_tag_urn(f"{key}.{value}")
            else:
                tag_urn = make_tag_urn(raw_tag)
            associations.append(TagAssociationClass(tag=tag_urn))

        if associations:
            proposals.append(
                MetadataChangeProposalWrapper(
                    entityUrn=chart_urn,
                    aspect=GlobalTagsClass(tags=associations),
                )
            )

    return dataset_urn, chart_urn, proposals
125
+
126
+
127
def build_dashboard_mcps(
    dashboard: Dashboard,
    platform: str,
    platform_instance: Optional[str],
    chart_urns: List[str],
    base_url: str,
    ingest_owners: bool,
    ingest_tags: bool,
) -> Tuple[str, List[MetadataChangeProposalWrapper]]:
    """Describe a Grafana dashboard as a DataHub dashboard entity.

    Returns the dashboard urn together with MCPs for its platform
    instance, dashboard info, (optional) ownership, (optional) tags
    and status, in that order.
    """
    dashboard_urn = make_dashboard_urn(platform, dashboard.uid, platform_instance)
    proposals: List[MetadataChangeProposalWrapper] = []

    # dataPlatformInstance aspect (instance urn only when configured)
    instance_urn = (
        make_dataplatform_instance_urn(
            platform=platform,
            instance=platform_instance,
        )
        if platform_instance
        else None
    )
    proposals.append(
        MetadataChangeProposalWrapper(
            entityUrn=dashboard_urn,
            aspect=DataPlatformInstanceClass(
                platform=make_data_platform_urn(platform),
                instance=instance_urn,
            ),
        )
    )

    # dashboardInfo aspect — links the dashboard to its chart urns
    proposals.append(
        MetadataChangeProposalWrapper(
            entityUrn=dashboard_urn,
            aspect=DashboardInfoClass(
                description=dashboard.description,
                title=dashboard.title,
                charts=chart_urns,
                lastModified=ChangeAuditStampsClass(),
                dashboardUrl=f"{base_url}/d/{dashboard.uid}",
                customProperties=_build_dashboard_properties(dashboard),
            ),
        )
    )

    # ownership aspect (optional)
    if dashboard.uid and ingest_owners:
        ownership = _build_ownership(dashboard)
        if ownership is not None:
            proposals.append(
                MetadataChangeProposalWrapper(
                    entityUrn=dashboard_urn,
                    aspect=ownership,
                )
            )

    # globalTags aspect (optional) — dashboard tags are used verbatim
    if dashboard.tags and ingest_tags:
        associations = [
            TagAssociationClass(tag=make_tag_urn(tag)) for tag in dashboard.tags
        ]
        if associations:
            proposals.append(
                MetadataChangeProposalWrapper(
                    entityUrn=dashboard_urn,
                    aspect=GlobalTagsClass(tags=associations),
                )
            )

    # status aspect — mark the dashboard as not soft-deleted
    proposals.append(
        MetadataChangeProposalWrapper(
            entityUrn=dashboard_urn,
            aspect=StatusClass(removed=False),
        )
    )

    return dashboard_urn, proposals
202
+
203
+
204
def _build_custom_properties(panel: Panel) -> Dict[str, str]:
    """Collect string-valued custom properties for a chart from its panel.

    Falsy attribute values are skipped rather than emitted as empty
    strings; the two datasource fields are the exception and default
    to "" when the reference is present but partially unset.
    """
    props: Dict[str, str] = {}

    if panel.type:
        props["type"] = panel.type

    datasource = panel.datasource_ref
    if datasource:
        props["datasourceType"] = datasource.type or ""
        props["datasourceUid"] = datasource.uid or ""

    # Optional scalar attributes copied through as strings when set.
    for attr in (
        "description",
        "format",
        "pluginVersion",
        "repeatDirection",
        "maxDataPoints",
    ):
        value = getattr(panel, attr, None)
        if value:
            props[attr] = str(value)

    if panel.query_targets:
        props["targetsCount"] = str(len(panel.query_targets))

    return props
230
+
231
+
232
def _build_dashboard_properties(dashboard: Dashboard) -> Dict[str, str]:
    """Collect dashboard-level custom properties, skipping unset values.

    Values are copied through as-is (callers are expected to supply
    string-typed fields); falsy fields produce no entry at all.
    """
    props: Dict[str, str] = {}

    for key, value in (
        ("timezone", dashboard.timezone),
        ("schema_version", dashboard.schema_version),
        ("version", dashboard.version),
        ("refresh", dashboard.refresh),
    ):
        if value:
            props[key] = value

    return props
249
+
250
+
251
def _build_ownership(dashboard: Dashboard) -> Optional[OwnershipClass]:
    """Derive dashboard ownership from its uid and creator, if any.

    Returns None when neither field yields an owner.
    """
    owners: List[OwnerClass] = []

    if dashboard.uid:
        # The dashboard uid doubles as the technical-owner user id here —
        # presumably a stable Grafana identifier; TODO confirm with callers.
        owners.append(
            OwnerClass(
                owner=make_user_urn(dashboard.uid),
                type=OwnershipTypeClass.TECHNICAL_OWNER,
            )
        )

    if dashboard.created_by:
        # created_by may be an email address; keep only the local part.
        local_part = dashboard.created_by.split("@")[0]
        owners.append(
            OwnerClass(
                owner=make_user_urn(local_part),
                type=OwnershipTypeClass.DATAOWNER,
            )
        )

    if not owners:
        return None
    return OwnershipClass(owners=owners)