acryl-datahub 1.0.0rc9__py3-none-any.whl → 1.0.0rc10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only; it reflects the changes between package versions exactly as they appear in their respective public registries.

Potentially problematic release.


This version of acryl-datahub might be problematic. See the registry's advisory page for this release for more details.

@@ -1,6 +1,6 @@
1
1
  datahub/__init__.py,sha256=aq_i5lVREmoLfYIqcx_pEQicO855YlhD19tWc1eZZNI,59
2
2
  datahub/__main__.py,sha256=pegIvQ9hzK7IhqVeUi1MeADSZ2QlP-D3K0OQdEg55RU,106
3
- datahub/_version.py,sha256=Lb1n-6UK3dLoc2_soJ-gDgjrGTnKGEGSjla124hRAcA,321
3
+ datahub/_version.py,sha256=FaV2BCpaCxlMvv5cXKYFF-MDOTi4jF-QgH3AQNNnof0,322
4
4
  datahub/entrypoints.py,sha256=2TYgHhs3sCxJlojIHjqfxzt3_ImPwPzq4vBtsUuMqu4,8885
5
5
  datahub/errors.py,sha256=w6h8b27j9XlmPbTwqpu7-wgiTrXlHzcnUOnJ_iOrwzo,520
6
6
  datahub/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -119,7 +119,7 @@ datahub/emitter/composite_emitter.py,sha256=ZU-IdlAXKGPtmyT0JJgYC09vRn-TmeNaA6VP
119
119
  datahub/emitter/enum_helpers.py,sha256=QBOEUu_hDCvyL_v4ayNQV8XwJbf5zKyu0Xat0mI1Kgo,376
120
120
  datahub/emitter/generic_emitter.py,sha256=i37ZFm9VR_tmiZm9kIypEkQEB_cLKbzj_tJvViN-fm8,828
121
121
  datahub/emitter/kafka_emitter.py,sha256=Uix1W1WaXF8VqUTUfzdRZKca2XrR1w50Anx2LVkROlc,5822
122
- datahub/emitter/mce_builder.py,sha256=9wjXG1WmWZUN7-_JdRJ5OcH8IPG0b3TGzxry4yscOR0,16545
122
+ datahub/emitter/mce_builder.py,sha256=8UiG2VsYgC7n29h_y4qL6F9faGwwMZF3zGscl_CBT9s,16808
123
123
  datahub/emitter/mcp.py,sha256=hAAYziDdkwjazQU0DtWMbQWY8wS09ACrKJbqxoWXdgc,9637
124
124
  datahub/emitter/mcp_builder.py,sha256=Q1bX2BthNvZ7ae71XYF6ICoiN8IOqaAd_h3zOct57Q0,11752
125
125
  datahub/emitter/mcp_patch_builder.py,sha256=u7cpW6DkiN7KpLapmMaXgL_FneoN69boxiANbVgMdSI,4564
@@ -202,9 +202,9 @@ datahub/ingestion/source/ge_data_profiler.py,sha256=C93ZZrtIRVL6pDpQ3fn7ZbbJiZmH
202
202
  datahub/ingestion/source/ge_profiling_config.py,sha256=FlWfXoVoayabVXNMB9qETEU0GX0az6HYqNUZRnIu_fQ,10866
203
203
  datahub/ingestion/source/glue_profiling_config.py,sha256=vpMJH4Lf_qgR32BZy58suabri1yV5geaAPjzg2eORDc,2559
204
204
  datahub/ingestion/source/ldap.py,sha256=CNr3foofIpoCXu_GGqfcajlQE2qkHr5isYwVcDutdkk,18695
205
- datahub/ingestion/source/metabase.py,sha256=6mUPZrgv0Yrdu_crYWjbd1B6dRKx1YCRAz9uocIZYXw,32588
205
+ datahub/ingestion/source/metabase.py,sha256=j8DRV2GvisezidL1JZ5HJLF_hdFdtvaoyDoEdEyh0Ks,32603
206
206
  datahub/ingestion/source/mlflow.py,sha256=cqQivSyrptm15vn--xbT7eTRHJJVKMmQpoVqfzuDIDU,12858
207
- datahub/ingestion/source/mode.py,sha256=26gB13L6Eflm8fle_e31x-FnLn41WdRsBmelsiFavu4,63627
207
+ datahub/ingestion/source/mode.py,sha256=w85zCIZicfABx5dKCupsGpH1tgUMhS1El-jIWa2gwNU,63632
208
208
  datahub/ingestion/source/mongodb.py,sha256=2C2Cxn8DXL53IbNiywIuKt8UT_EMcPg9f8su-OPSNGU,21237
209
209
  datahub/ingestion/source/nifi.py,sha256=w5TPnqPmpotvzSsJROi6nUiHWPUVC6u1g0CzXIE6FNs,56903
210
210
  datahub/ingestion/source/openapi.py,sha256=39ep3etbWh8NBPjTXXwH3mieC5P6bMVAjhvK7UvcTis,17372
@@ -268,7 +268,7 @@ datahub/ingestion/source/cassandra/cassandra_profiling.py,sha256=DkSIryZNwLei5Pa
268
268
  datahub/ingestion/source/cassandra/cassandra_utils.py,sha256=j-LidYkaCTmGnpUVNLsax_c3z32PsQbsbHeYojygd1s,5105
269
269
  datahub/ingestion/source/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
270
270
  datahub/ingestion/source/common/data_reader.py,sha256=XbSxiRTYrk6seOz0ZjVjzSpGvP8lEjmqXrNI4cdYYmQ,1819
271
- datahub/ingestion/source/common/subtypes.py,sha256=S0ssIxV7V38HGQwl-h5izYWyj1MQgmvJk4k_Q-5VGJ8,2329
271
+ datahub/ingestion/source/common/subtypes.py,sha256=EiYSjBHiRvGjRB5wjKEfS5b_k9tQCFWMP1ADw_1p-CY,2525
272
272
  datahub/ingestion/source/data_lake_common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
273
273
  datahub/ingestion/source/data_lake_common/config.py,sha256=qUk83B01hjuBKHvVz8SmXnVCy5eFj-2-2QLEOrAdbgk,359
274
274
  datahub/ingestion/source/data_lake_common/data_lake_utils.py,sha256=nxu7osuzqxScPFc-1ODA2M1c_xPNPpRH_SMMU7zKOIE,6212
@@ -328,7 +328,7 @@ datahub/ingestion/source/iceberg/iceberg_common.py,sha256=krt-41r90t0CkNeJXsiwO-
328
328
  datahub/ingestion/source/iceberg/iceberg_profiler.py,sha256=CkBB5fryMVoqqCM6eLSIeb4yP85ABHONNRm0QqZKrnw,9977
329
329
  datahub/ingestion/source/identity/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
330
330
  datahub/ingestion/source/identity/azure_ad.py,sha256=9Hrvm4CSfc02yjnPUsCYSY4Qw9fXPnDFWLexab0mcpc,28559
331
- datahub/ingestion/source/identity/okta.py,sha256=ZVvRgFUyJ2jUSq0RS_0Cx-2J4oxMbruKhk7blts_HaU,31231
331
+ datahub/ingestion/source/identity/okta.py,sha256=jC21myJuMRTaPgj0OD9heaC-mz8ECjqpy2hSJwlUSwM,31943
332
332
  datahub/ingestion/source/kafka/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
333
333
  datahub/ingestion/source/kafka/kafka.py,sha256=mboUWQmlumEwcXwY2POeK1L8tdk5-CABakZ-MWbvdNQ,26579
334
334
  datahub/ingestion/source/kafka/kafka_schema_registry_base.py,sha256=13XjSwqyVhH1CJUFHAbWdmmv_Rw0Ju_9HQdBmIzPNNA,566
@@ -481,8 +481,8 @@ datahub/ingestion/source/sql/trino.py,sha256=8viVOu67mhDnsO3LuPSRi1WDR5MLdOXu7HO
481
481
  datahub/ingestion/source/sql/two_tier_sql_source.py,sha256=YDrGBb5WKVls6qv17QU5foKrf71SydzEltc3WsVAhQc,5732
482
482
  datahub/ingestion/source/sql/vertica.py,sha256=_9OgSgIgqBml0av063rb8nACiT3SAmzpw0ouyF91wv8,33382
483
483
  datahub/ingestion/source/sql/mssql/__init__.py,sha256=1agpl8S_uDW40olkhCX_W19dbr5GO9qgjS3R7pLRZSk,87
484
- datahub/ingestion/source/sql/mssql/job_models.py,sha256=nrRDYVKah5ep7J8-ddNX2PVPY0MIPQqkvEgO33s5L3k,7988
485
- datahub/ingestion/source/sql/mssql/source.py,sha256=sHckKQ7qMJKnH6r8rj8B-9NiIGXFLiyKTLBJh1SJxmQ,32371
484
+ datahub/ingestion/source/sql/mssql/job_models.py,sha256=5-QQv8w-KnyNq_y-VmSC_K5sr0VoZhfYW6Aasd-z2LY,8901
485
+ datahub/ingestion/source/sql/mssql/source.py,sha256=QxgUWL-aSjTXmqZPD_7811MsrDsgW_I9_oMimomdE0A,32593
486
486
  datahub/ingestion/source/sql/mssql/stored_procedure_lineage.py,sha256=RpnvKPalAAaOD_eUg8bZ4VkGTSeLFWuy0mefwc4s3x8,2837
487
487
  datahub/ingestion/source/state/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
488
488
  datahub/ingestion/source/state/checkpoint.py,sha256=-fTUZKkY4nHTFqSWZ0jJkkdIu_tWlOjRNhm4FTr4ul4,8860
@@ -580,7 +580,7 @@ datahub/metadata/schema.avsc,sha256=iQJaPYHy4xrGQBEbRgn-RF4kGC1iNPhZawHTAYTyfW0,
580
580
  datahub/metadata/schema_classes.py,sha256=X5Jl5EaSxyHdXOQv14pJ5WkQALun4MRpJ4q12wVFE18,1299
581
581
  datahub/metadata/urns.py,sha256=nfrCTExR-k2P9w272WVtWSN3xW1VUJngPwP3xnvULjU,1217
582
582
  datahub/metadata/_urns/__init__.py,sha256=cOF3GHMDgPhmbLKbN02NPpuLGHSu0qNgQyBRv08eqF0,243
583
- datahub/metadata/_urns/urn_defs.py,sha256=aw3971Iq-qsRcvqxIIrr9a4_1IdjWgkpIjH5qI1hnaI,133309
583
+ datahub/metadata/_urns/urn_defs.py,sha256=mQ52ozRUt19MyBLNZh1f1ETlafCzCYmEbcKxAjR_8o4,133983
584
584
  datahub/metadata/com/__init__.py,sha256=gsAIuTxzfJdI7a9ybZlgMIHMAYksM1SxGxXjtySgKSc,202
585
585
  datahub/metadata/com/linkedin/__init__.py,sha256=gsAIuTxzfJdI7a9ybZlgMIHMAYksM1SxGxXjtySgKSc,202
586
586
  datahub/metadata/com/linkedin/events/__init__.py,sha256=s_dR0plZF-rOxxIbE8ojekJqwiHzl2WYR-Z3kW6kKS0,298
@@ -1021,9 +1021,9 @@ datahub_provider/operators/datahub_assertion_operator.py,sha256=uvTQ-jk2F0sbqqxp
1021
1021
  datahub_provider/operators/datahub_assertion_sensor.py,sha256=lCBj_3x1cf5GMNpHdfkpHuyHfVxsm6ff5x2Z5iizcAo,140
1022
1022
  datahub_provider/operators/datahub_operation_operator.py,sha256=aevDp2FzX7FxGlXrR0khoHNbxbhKR2qPEX5e8O2Jyzw,174
1023
1023
  datahub_provider/operators/datahub_operation_sensor.py,sha256=8fcdVBCEPgqy1etTXgLoiHoJrRt_nzFZQMdSzHqSG7M,168
1024
- acryl_datahub-1.0.0rc9.dist-info/LICENSE,sha256=9xNHpsD0uYF5ONzXsKDCuHHB-xbiCrSbueWXqrTNsxk,11365
1025
- acryl_datahub-1.0.0rc9.dist-info/METADATA,sha256=V5RVytQ19FP_0BtHEgHjQ-89KScjbKyOHx4p5yZxLOQ,175382
1026
- acryl_datahub-1.0.0rc9.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
1027
- acryl_datahub-1.0.0rc9.dist-info/entry_points.txt,sha256=U1e5ZwqPX1OaIbvGrwvozcdB8SbzFYXQM7plpdLKKeo,9592
1028
- acryl_datahub-1.0.0rc9.dist-info/top_level.txt,sha256=iLjSrLK5ox1YVYcglRUkcvfZPvKlobBWx7CTUXx8_GI,25
1029
- acryl_datahub-1.0.0rc9.dist-info/RECORD,,
1024
+ acryl_datahub-1.0.0rc10.dist-info/LICENSE,sha256=9xNHpsD0uYF5ONzXsKDCuHHB-xbiCrSbueWXqrTNsxk,11365
1025
+ acryl_datahub-1.0.0rc10.dist-info/METADATA,sha256=KibwklUZSYWViDayJ8MX8bAVCp7PPGAQsqfyBdWoSHM,175337
1026
+ acryl_datahub-1.0.0rc10.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
1027
+ acryl_datahub-1.0.0rc10.dist-info/entry_points.txt,sha256=U1e5ZwqPX1OaIbvGrwvozcdB8SbzFYXQM7plpdLKKeo,9592
1028
+ acryl_datahub-1.0.0rc10.dist-info/top_level.txt,sha256=iLjSrLK5ox1YVYcglRUkcvfZPvKlobBWx7CTUXx8_GI,25
1029
+ acryl_datahub-1.0.0rc10.dist-info/RECORD,,
datahub/_version.py CHANGED
@@ -1,6 +1,6 @@
1
1
  # Published at https://pypi.org/project/acryl-datahub/.
2
2
  __package_name__ = "acryl-datahub"
3
- __version__ = "1.0.0rc9"
3
+ __version__ = "1.0.0rc10"
4
4
 
5
5
 
6
6
  def is_dev_mode() -> bool:
@@ -52,7 +52,15 @@ from datahub.metadata.schema_classes import (
52
52
  UpstreamLineageClass,
53
53
  _Aspect as AspectAbstract,
54
54
  )
55
- from datahub.metadata.urns import DataFlowUrn, DatasetUrn, TagUrn
55
+ from datahub.metadata.urns import (
56
+ ChartUrn,
57
+ DashboardUrn,
58
+ DataFlowUrn,
59
+ DataJobUrn,
60
+ DataPlatformUrn,
61
+ DatasetUrn,
62
+ TagUrn,
63
+ )
56
64
  from datahub.utilities.urn_encoder import UrnEncoder
57
65
 
58
66
  logger = logging.getLogger(__name__)
@@ -119,7 +127,7 @@ def parse_ts_millis(ts: Optional[float]) -> Optional[datetime]:
119
127
  def make_data_platform_urn(platform: str) -> str:
120
128
  if platform.startswith("urn:li:dataPlatform:"):
121
129
  return platform
122
- return f"urn:li:dataPlatform:{platform}"
130
+ return DataPlatformUrn.create_from_id(platform).urn()
123
131
 
124
132
 
125
133
  def make_dataset_urn(platform: str, name: str, env: str = DEFAULT_ENV) -> str:
@@ -236,7 +244,7 @@ def make_user_urn(username: str) -> str:
236
244
  Makes a user urn if the input is not a user or group urn already
237
245
  """
238
246
  return (
239
- f"urn:li:corpuser:{username}"
247
+ f"urn:li:corpuser:{UrnEncoder.encode_string(username)}"
240
248
  if not username.startswith(("urn:li:corpuser:", "urn:li:corpGroup:"))
241
249
  else username
242
250
  )
@@ -249,7 +257,7 @@ def make_group_urn(groupname: str) -> str:
249
257
  if groupname and groupname.startswith(("urn:li:corpGroup:", "urn:li:corpuser:")):
250
258
  return groupname
251
259
  else:
252
- return f"urn:li:corpGroup:{groupname}"
260
+ return f"urn:li:corpGroup:{UrnEncoder.encode_string(groupname)}"
253
261
 
254
262
 
255
263
  def make_tag_urn(tag: str) -> str:
@@ -301,7 +309,12 @@ def make_data_flow_urn(
301
309
 
302
310
 
303
311
  def make_data_job_urn_with_flow(flow_urn: str, job_id: str) -> str:
304
- return f"urn:li:dataJob:({flow_urn},{job_id})"
312
+ data_flow_urn = DataFlowUrn.from_string(flow_urn)
313
+ data_job_urn = DataJobUrn.create_from_ids(
314
+ data_flow_urn=data_flow_urn.urn(),
315
+ job_id=job_id,
316
+ )
317
+ return data_job_urn.urn()
305
318
 
306
319
 
307
320
  def make_data_process_instance_urn(dataProcessInstanceId: str) -> str:
@@ -324,10 +337,11 @@ def make_dashboard_urn(
324
337
  platform: str, name: str, platform_instance: Optional[str] = None
325
338
  ) -> str:
326
339
  # FIXME: dashboards don't currently include data platform urn prefixes.
327
- if platform_instance:
328
- return f"urn:li:dashboard:({platform},{platform_instance}.{name})"
329
- else:
330
- return f"urn:li:dashboard:({platform},{name})"
340
+ return DashboardUrn.create_from_ids(
341
+ platform=platform,
342
+ name=name,
343
+ platform_instance=platform_instance,
344
+ ).urn()
331
345
 
332
346
 
333
347
  def dashboard_urn_to_key(dashboard_urn: str) -> Optional[DashboardKeyClass]:
@@ -342,10 +356,11 @@ def make_chart_urn(
342
356
  platform: str, name: str, platform_instance: Optional[str] = None
343
357
  ) -> str:
344
358
  # FIXME: charts don't currently include data platform urn prefixes.
345
- if platform_instance:
346
- return f"urn:li:chart:({platform},{platform_instance}.{name})"
347
- else:
348
- return f"urn:li:chart:({platform},{name})"
359
+ return ChartUrn.create_from_ids(
360
+ platform=platform,
361
+ name=name,
362
+ platform_instance=platform_instance,
363
+ ).urn()
349
364
 
350
365
 
351
366
  def chart_urn_to_key(chart_urn: str) -> Optional[ChartKeyClass]:
@@ -60,8 +60,15 @@ class BIContainerSubTypes(StrEnum):
60
60
  MODE_COLLECTION = "Collection"
61
61
 
62
62
 
63
+ class FlowContainerSubTypes(StrEnum):
64
+ MSSQL_JOB = "Job"
65
+ MSSQL_PROCEDURE_CONTAINER = "Procedures Container"
66
+
67
+
63
68
  class JobContainerSubTypes(StrEnum):
64
69
  NIFI_PROCESS_GROUP = "Process Group"
70
+ MSSQL_JOBSTEP = "Job Step"
71
+ MSSQL_STORED_PROCEDURE = "Stored Procedure"
65
72
 
66
73
 
67
74
  class BIAssetSubTypes(StrEnum):
@@ -666,6 +666,27 @@ class OktaSource(StatefulIngestionSourceBase):
666
666
  self.config.okta_profile_to_username_regex,
667
667
  )
668
668
 
669
+ def _map_okta_user_profile_custom_properties(
670
+ self, profile: UserProfile
671
+ ) -> Dict[str, str]:
672
+ # filter out the common fields that are already mapped to the CorpUserInfo aspect and the private ones
673
+ return {
674
+ k: str(v)
675
+ for k, v in profile.__dict__.items()
676
+ if v
677
+ and k
678
+ not in [
679
+ "displayName",
680
+ "firstName",
681
+ "lastName",
682
+ "email",
683
+ "title",
684
+ "countryCode",
685
+ "department",
686
+ ]
687
+ and not k.startswith("_")
688
+ }
689
+
669
690
  # Converts Okta User Profile into a CorpUserInfo.
670
691
  def _map_okta_user_profile(self, profile: UserProfile) -> CorpUserInfoClass:
671
692
  # TODO: Extract user's manager if provided.
@@ -683,6 +704,7 @@ class OktaSource(StatefulIngestionSourceBase):
683
704
  title=profile.title,
684
705
  countryCode=profile.countryCode,
685
706
  departmentName=profile.department,
707
+ customProperties=self._map_okta_user_profile_custom_properties(profile),
686
708
  )
687
709
 
688
710
  def _make_corp_group_urn(self, name: str) -> str:
@@ -313,7 +313,7 @@ class MetabaseSource(StatefulIngestionSourceBase):
313
313
  return None
314
314
 
315
315
  dashboard_urn = builder.make_dashboard_urn(
316
- self.platform, dashboard_details.get("id", "")
316
+ self.platform, str(dashboard_details.get("id", ""))
317
317
  )
318
318
  dashboard_snapshot = DashboardSnapshot(
319
319
  urn=dashboard_urn,
@@ -337,7 +337,7 @@ class MetabaseSource(StatefulIngestionSourceBase):
337
337
  card_id = card_info.get("card").get("id", "")
338
338
  if not card_id:
339
339
  continue # most likely a virtual card without an id (text or heading), not relevant.
340
- chart_urn = builder.make_chart_urn(self.platform, card_id)
340
+ chart_urn = builder.make_chart_urn(self.platform, str(card_id))
341
341
  chart_urns.append(chart_urn)
342
342
 
343
343
  dashboard_info_class = DashboardInfoClass(
@@ -459,7 +459,7 @@ class MetabaseSource(StatefulIngestionSourceBase):
459
459
  )
460
460
  return None
461
461
 
462
- chart_urn = builder.make_chart_urn(self.platform, card_id)
462
+ chart_urn = builder.make_chart_urn(self.platform, str(card_id))
463
463
  chart_snapshot = ChartSnapshot(
464
464
  urn=chart_urn,
465
465
  aspects=[],
@@ -377,7 +377,7 @@ class ModeSource(StatefulIngestionSourceBase):
377
377
  ]
378
378
 
379
379
  def _dashboard_urn(self, report_info: dict) -> str:
380
- return builder.make_dashboard_urn(self.platform, report_info.get("id", ""))
380
+ return builder.make_dashboard_urn(self.platform, str(report_info.get("id", "")))
381
381
 
382
382
  def _parse_last_run_at(self, report_info: dict) -> Optional[int]:
383
383
  # Mode queries are refreshed, and that timestamp is reflected correctly here.
@@ -11,12 +11,17 @@ from datahub.emitter.mcp_builder import (
11
11
  DatabaseKey,
12
12
  SchemaKey,
13
13
  )
14
+ from datahub.ingestion.source.common.subtypes import (
15
+ FlowContainerSubTypes,
16
+ JobContainerSubTypes,
17
+ )
14
18
  from datahub.metadata.schema_classes import (
15
19
  ContainerClass,
16
20
  DataFlowInfoClass,
17
21
  DataJobInfoClass,
18
22
  DataJobInputOutputClass,
19
23
  DataPlatformInstanceClass,
24
+ SubTypesClass,
20
25
  )
21
26
 
22
27
 
@@ -211,6 +216,18 @@ class MSSQLDataJob:
211
216
  status=self.status,
212
217
  )
213
218
 
219
+ @property
220
+ def as_subtypes_aspect(self) -> SubTypesClass:
221
+ assert isinstance(self.entity, (JobStep, StoredProcedure))
222
+ type = (
223
+ JobContainerSubTypes.MSSQL_JOBSTEP
224
+ if isinstance(self.entity, JobStep)
225
+ else JobContainerSubTypes.MSSQL_STORED_PROCEDURE
226
+ )
227
+ return SubTypesClass(
228
+ typeNames=[type],
229
+ )
230
+
214
231
  @property
215
232
  def as_maybe_platform_instance_aspect(self) -> Optional[DataPlatformInstanceClass]:
216
233
  if self.entity.flow.platform_instance:
@@ -276,6 +293,18 @@ class MSSQLDataFlow:
276
293
  externalUrl=self.external_url,
277
294
  )
278
295
 
296
+ @property
297
+ def as_subtypes_aspect(self) -> SubTypesClass:
298
+ assert isinstance(self.entity, (MSSQLJob, MSSQLProceduresContainer))
299
+ type = (
300
+ FlowContainerSubTypes.MSSQL_JOB
301
+ if isinstance(self.entity, MSSQLJob)
302
+ else FlowContainerSubTypes.MSSQL_PROCEDURE_CONTAINER
303
+ )
304
+ return SubTypesClass(
305
+ typeNames=[type],
306
+ )
307
+
279
308
  @property
280
309
  def as_maybe_platform_instance_aspect(self) -> Optional[DataPlatformInstanceClass]:
281
310
  if self.entity.platform_instance:
@@ -638,6 +638,11 @@ class SQLServerSource(SQLAlchemySource):
638
638
  aspect=data_job.as_datajob_info_aspect,
639
639
  ).as_workunit()
640
640
 
641
+ yield MetadataChangeProposalWrapper(
642
+ entityUrn=data_job.urn,
643
+ aspect=data_job.as_subtypes_aspect,
644
+ ).as_workunit()
645
+
641
646
  data_platform_instance_aspect = data_job.as_maybe_platform_instance_aspect
642
647
  if data_platform_instance_aspect:
643
648
  yield MetadataChangeProposalWrapper(
@@ -676,8 +681,6 @@ class SQLServerSource(SQLAlchemySource):
676
681
  ),
677
682
  ).as_workunit()
678
683
 
679
- # TODO: Add SubType when it appear
680
-
681
684
  def construct_flow_workunits(
682
685
  self,
683
686
  data_flow: MSSQLDataFlow,
@@ -687,6 +690,11 @@ class SQLServerSource(SQLAlchemySource):
687
690
  aspect=data_flow.as_dataflow_info_aspect,
688
691
  ).as_workunit()
689
692
 
693
+ yield MetadataChangeProposalWrapper(
694
+ entityUrn=data_flow.urn,
695
+ aspect=data_flow.as_subtypes_aspect,
696
+ ).as_workunit()
697
+
690
698
  data_platform_instance_aspect = data_flow.as_maybe_platform_instance_aspect
691
699
  if data_platform_instance_aspect:
692
700
  yield MetadataChangeProposalWrapper(
@@ -700,8 +708,6 @@ class SQLServerSource(SQLAlchemySource):
700
708
  aspect=data_flow.as_container_aspect,
701
709
  ).as_workunit()
702
710
 
703
- # TODO: Add SubType when it appear
704
-
705
711
  def get_inspectors(self) -> Iterable[Inspector]:
706
712
  # This method can be overridden in the case that you want to dynamically
707
713
  # run on multiple databases.
@@ -788,6 +788,18 @@ class ChartUrn(_SpecificUrn):
788
788
  def from_key_aspect(cls, key_aspect: "ChartKeyClass") -> "ChartUrn":
789
789
  return cls(dashboard_tool=key_aspect.dashboardTool, chart_id=key_aspect.chartId)
790
790
 
791
+ @classmethod
792
+ def create_from_ids(
793
+ cls,
794
+ platform: str,
795
+ name: str,
796
+ platform_instance: Optional[str] = None,
797
+ ) -> "ChartUrn":
798
+ return ChartUrn(
799
+ dashboard_tool=platform,
800
+ chart_id=f"{platform_instance}.{name}" if platform_instance else name,
801
+ )
802
+
791
803
  @property
792
804
  def dashboard_tool(self) -> str:
793
805
  return self._entity_ids[0]
@@ -1480,6 +1492,18 @@ class DashboardUrn(_SpecificUrn):
1480
1492
  def from_key_aspect(cls, key_aspect: "DashboardKeyClass") -> "DashboardUrn":
1481
1493
  return cls(dashboard_tool=key_aspect.dashboardTool, dashboard_id=key_aspect.dashboardId)
1482
1494
 
1495
+ @classmethod
1496
+ def create_from_ids(
1497
+ cls,
1498
+ platform: str,
1499
+ name: str,
1500
+ platform_instance: Optional[str] = None,
1501
+ ) -> "DashboardUrn":
1502
+ return DashboardUrn(
1503
+ dashboard_tool=platform,
1504
+ dashboard_id=f"{platform_instance}.{name}" if platform_instance else name,
1505
+ )
1506
+
1483
1507
  @property
1484
1508
  def dashboard_tool(self) -> str:
1485
1509
  return self._entity_ids[0]