semantic-link-labs 0.8.10__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of semantic-link-labs might be problematic.

Files changed (81)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
  3. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +34 -3
  5. sempy_labs/_authentication.py +80 -4
  6. sempy_labs/_capacities.py +770 -200
  7. sempy_labs/_capacity_migration.py +7 -37
  8. sempy_labs/_clear_cache.py +37 -35
  9. sempy_labs/_connections.py +13 -13
  10. sempy_labs/_data_pipelines.py +20 -20
  11. sempy_labs/_dataflows.py +27 -28
  12. sempy_labs/_dax.py +41 -47
  13. sempy_labs/_deployment_pipelines.py +1 -1
  14. sempy_labs/_environments.py +26 -23
  15. sempy_labs/_eventhouses.py +16 -15
  16. sempy_labs/_eventstreams.py +16 -15
  17. sempy_labs/_external_data_shares.py +18 -20
  18. sempy_labs/_gateways.py +16 -14
  19. sempy_labs/_generate_semantic_model.py +107 -62
  20. sempy_labs/_git.py +105 -43
  21. sempy_labs/_helper_functions.py +251 -194
  22. sempy_labs/_job_scheduler.py +227 -0
  23. sempy_labs/_kql_databases.py +16 -15
  24. sempy_labs/_kql_querysets.py +16 -15
  25. sempy_labs/_list_functions.py +150 -126
  26. sempy_labs/_managed_private_endpoints.py +19 -17
  27. sempy_labs/_mirrored_databases.py +51 -48
  28. sempy_labs/_mirrored_warehouses.py +5 -4
  29. sempy_labs/_ml_experiments.py +16 -15
  30. sempy_labs/_ml_models.py +15 -14
  31. sempy_labs/_model_bpa.py +210 -207
  32. sempy_labs/_model_bpa_bulk.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +3 -3
  34. sempy_labs/_model_dependencies.py +55 -29
  35. sempy_labs/_notebooks.py +29 -25
  36. sempy_labs/_one_lake_integration.py +23 -26
  37. sempy_labs/_query_scale_out.py +75 -64
  38. sempy_labs/_refresh_semantic_model.py +25 -26
  39. sempy_labs/_spark.py +33 -32
  40. sempy_labs/_sql.py +19 -12
  41. sempy_labs/_translations.py +10 -7
  42. sempy_labs/_vertipaq.py +38 -33
  43. sempy_labs/_warehouses.py +26 -25
  44. sempy_labs/_workspace_identity.py +11 -10
  45. sempy_labs/_workspaces.py +40 -33
  46. sempy_labs/admin/_basic_functions.py +166 -115
  47. sempy_labs/admin/_domains.py +7 -2
  48. sempy_labs/admin/_external_data_share.py +3 -3
  49. sempy_labs/admin/_git.py +4 -1
  50. sempy_labs/admin/_items.py +11 -6
  51. sempy_labs/admin/_scanner.py +10 -5
  52. sempy_labs/directlake/_directlake_schema_compare.py +25 -16
  53. sempy_labs/directlake/_directlake_schema_sync.py +24 -12
  54. sempy_labs/directlake/_dl_helper.py +74 -55
  55. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  56. sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
  57. sempy_labs/directlake/_get_shared_expression.py +4 -3
  58. sempy_labs/directlake/_guardrails.py +12 -6
  59. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  60. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  61. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  62. sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
  63. sempy_labs/directlake/_warm_cache.py +87 -65
  64. sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
  65. sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
  66. sempy_labs/lakehouse/_lakehouse.py +7 -20
  67. sempy_labs/lakehouse/_shortcuts.py +42 -23
  68. sempy_labs/migration/_create_pqt_file.py +16 -11
  69. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  70. sempy_labs/report/_download_report.py +9 -8
  71. sempy_labs/report/_generate_report.py +85 -44
  72. sempy_labs/report/_paginated.py +9 -9
  73. sempy_labs/report/_report_bpa.py +15 -11
  74. sempy_labs/report/_report_functions.py +80 -91
  75. sempy_labs/report/_report_helper.py +8 -4
  76. sempy_labs/report/_report_list_functions.py +24 -13
  77. sempy_labs/report/_report_rebind.py +17 -16
  78. sempy_labs/report/_reportwrapper.py +41 -33
  79. sempy_labs/tom/_model.py +139 -21
  80. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
  81. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0
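
The hunks below are the changes to sempy_labs/_list_functions.py (item 25 in the list above).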
@@ -3,7 +3,6 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     create_relationship_name,
     resolve_lakehouse_id,
-    resolve_dataset_id,
     pagination,
     resolve_item_type,
     format_dax_object_name,

@@ -17,17 +16,17 @@ from uuid import UUID
 
 
 def get_object_level_security(
-    dataset: str | UUID, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows the object level security for the semantic model.
 
     Parameters
     ----------
-    dataset : str | UUID
+    dataset : str | uuid.UUID
         Name or ID of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
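The recurring change in this release widens the workspace parameters from Optional[str] to Optional[str | UUID], so a workspace (and, where applicable, a dataset) can be passed by ID as well as by name. A minimal usage sketch, assuming these functions remain exported at the package root as in earlier releases; the names and UUID below are hypothetical:

from uuid import UUID
import sempy_labs as labs

# By name, as in 0.8.10 (names are hypothetical).
ols = labs.get_object_level_security(dataset="Sales Model", workspace="Analytics")

# By ID, accepted as of 0.9.0 (UUID is hypothetical).
ols = labs.get_object_level_security(
    dataset="Sales Model",
    workspace=UUID("00000000-0000-0000-0000-000000000000"),
)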
@@ -85,17 +84,17 @@ def get_object_level_security(
 
 
 def list_tables(
-    dataset: str | UUID, workspace: Optional[str] = None, extended: bool = False
+    dataset: str | UUID, workspace: Optional[str | UUID] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a semantic model's tables and their properties.
 
     Parameters
     ----------
-    dataset : str | UUID
+    dataset : str | uuid.UUID
         Name or ID of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     extended : bool, default=False

@@ -258,16 +257,18 @@ def list_tables(
     return df
 
 
-def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
+def list_annotations(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
     """
     Shows a semantic model's annotations and their properties.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 

@@ -279,7 +280,8 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
 
     from sempy_labs.tom import connect_semantic_model
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     df = pd.DataFrame(
         columns=[

@@ -292,7 +294,7 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
     )
 
     with connect_semantic_model(
-        dataset=dataset, readonly=True, workspace=workspace
+        dataset=dataset_id, readonly=True, workspace=workspace_id
     ) as tom:
 
         mName = tom.model.Name
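
Call sites that previously used fabric.resolve_workspace_name now go through resolve_workspace_name_and_id and resolve_dataset_name_and_id, which yield both the display name (for messages) and the ID (for API calls). The diff does not show the helpers' implementation; the sketch below only illustrates the contract implied by the call sites, with a hypothetical in-memory table standing in for the Fabric lookup:

from typing import Optional, Tuple
from uuid import UUID

# Hypothetical stand-in for a workspace lookup against the Fabric service.
_WORKSPACES = {"Analytics": UUID("00000000-0000-0000-0000-000000000001")}

def resolve_workspace_name_and_id_sketch(
    workspace: Optional[str | UUID],
) -> Tuple[str, UUID]:
    if workspace is None:
        # The real helper falls back to the attached lakehouse's workspace
        # (or the notebook's); the stand-in just takes its only entry.
        return next(iter(_WORKSPACES.items()))
    try:
        uid = workspace if isinstance(workspace, UUID) else UUID(str(workspace))
        name = next(n for n, u in _WORKSPACES.items() if u == uid)
        return name, uid
    except ValueError:
        # Not UUID-shaped, so treat the value as a display name.
        return workspace, _WORKSPACES[workspace]

print(resolve_workspace_name_and_id_sketch("Analytics"))
print(resolve_workspace_name_and_id_sketch(UUID("00000000-0000-0000-0000-000000000001")))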
@@ -489,8 +491,8 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFr
 
 
 def list_columns(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     lakehouse: Optional[str] = None,
     lakehouse_workspace: Optional[str] = None,
 ) -> pd.DataFrame:

@@ -499,10 +501,10 @@ def list_columns(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     lakehouse : str, default=None

@@ -523,20 +525,21 @@ def list_columns(
     )
     from pyspark.sql import SparkSession
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+    dfP = fabric.list_partitions(dataset=dataset_id, workspace=workspace_id)
 
     isDirectLake = any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows())
 
-    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
 
     if isDirectLake:
         dfC["Column Cardinality"] = None
         sql_statements = []
         (lakeID, lakeName) = get_direct_lake_lakehouse(
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
             lakehouse=lakehouse,
             lakehouse_workspace=lakehouse_workspace,
         )

@@ -590,14 +593,14 @@ def list_columns(
     return dfC
 
 
-def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows a list of the dashboards within a workspace.
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 

@@ -620,7 +623,7 @@ def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:
         ]
     )
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.PowerBIRestClient()
     response = client.get(f"/v1.0/myorg/groups/{workspace_id}/dashboards")

@@ -645,14 +648,14 @@ def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:
     return df
 
 
-def list_lakehouses(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_lakehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the lakehouses within a workspace.
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 

@@ -675,7 +678,7 @@ def list_lakehouses(workspace: Optional[str] = None) -> pd.DataFrame:
         ]
     )
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/lakehouses")

@@ -705,14 +708,14 @@ def list_lakehouses(workspace: Optional[str] = None) -> pd.DataFrame:
     return df
 
 
-def list_sql_endpoints(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_sql_endpoints(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the SQL endpoints within a workspace.
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 

@@ -724,7 +727,7 @@ def list_sql_endpoints(workspace: Optional[str] = None) -> pd.DataFrame:
 
     df = pd.DataFrame(columns=["SQL Endpoint Id", "SQL Endpoint Name", "Description"])
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/sqlEndpoints")

@@ -746,14 +749,14 @@ def list_sql_endpoints(workspace: Optional[str] = None) -> pd.DataFrame:
     return df
 
 
-def list_datamarts(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_datamarts(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the datamarts within a workspace.
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 

@@ -765,7 +768,7 @@ def list_datamarts(workspace: Optional[str] = None) -> pd.DataFrame:
 
     df = pd.DataFrame(columns=["Datamart Name", "Datamart ID", "Description"])
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/datamarts")

@@ -791,7 +794,7 @@ def update_item(
     current_name: str,
     new_name: str,
     description: Optional[str] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Updates the name/description of a Fabric item.

@@ -806,13 +809,13 @@ def update_item(
         The new name of the item.
     description : str, default=None
         A description of the item.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_type = item_type.replace(" ", "").capitalize()
 
     if item_type not in icons.itemTypes.keys():

@@ -822,12 +825,12 @@ def update_item(
 
     itemType = icons.itemTypes[item_type]
 
-    dfI = fabric.list_items(workspace=workspace, type=item_type)
+    dfI = fabric.list_items(workspace=workspace_id, type=item_type)
     dfI_filt = dfI[(dfI["Display Name"] == current_name)]
 
     if len(dfI_filt) == 0:
         raise ValueError(
-            f"{icons.red_dot} The '{current_name}' {item_type} does not exist within the '{workspace}' workspace."
+            f"{icons.red_dot} The '{current_name}' {item_type} does not exist within the '{workspace_name}' workspace."
         )
 
     itemId = dfI_filt["Id"].iloc[0]

@@ -845,26 +848,26 @@ def update_item(
         raise FabricHTTPException(response)
     if description is None:
         print(
-            f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}'"
+            f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace_name}' workspace has been updated to be named '{new_name}'"
         )
     else:
         print(
-            f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace}' workspace has been updated to be named '{new_name}' and have a description of '{description}'"
+            f"{icons.green_dot} The '{current_name}' {item_type} within the '{workspace_name}' workspace has been updated to be named '{new_name}' and have a description of '{description}'"
         )
 
 
 def list_relationships(
-    dataset: str | UUID, workspace: Optional[str] = None, extended: bool = False
+    dataset: str | UUID, workspace: Optional[str | UUID] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a semantic model's relationships and their properties.
 
     Parameters
     ----------
-    dataset: str | UUID
+    dataset: str | uuid.UUID
         Name or UUID of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     extended : bool, default=False

@@ -937,16 +940,18 @@ def list_relationships(
     return dfR
 
 
-def list_kpis(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
+def list_kpis(
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
     """
     Shows a semantic model's KPIs and their properties.
 
     Parameters
     ----------
-    dataset: str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset: str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 

@@ -958,8 +963,11 @@ def list_kpis(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
 
     from sempy_labs.tom import connect_semantic_model
 
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
     with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=True
+        dataset=dataset_id, workspace=workspace_id, readonly=True
     ) as tom:
 
         df = pd.DataFrame(
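
list_annotations and list_kpis now resolve their inputs up front and hand IDs to connect_semantic_model; the new call sites pass dataset_id/workspace_id where names went before, so the wrapper accepts either form. A brief sketch of calling it directly, with hypothetical names:

from sempy_labs.tom import connect_semantic_model

# Read-only TOM connection; "Sales Model" and "Analytics" are hypothetical.
with connect_semantic_model(
    dataset="Sales Model", workspace="Analytics", readonly=True
) as tom:
    print(tom.model.Name)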
@@ -1002,17 +1010,17 @@ def list_kpis(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
 
 
 def list_semantic_model_objects(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows a list of semantic model objects.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 

@@ -1166,7 +1174,7 @@ def list_semantic_model_objects(
 
 
 def list_shortcuts(
-    lakehouse: Optional[str] = None, workspace: Optional[str] = None
+    lakehouse: Optional[str] = None, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows all shortcuts which exist in a Fabric lakehouse and their properties.

@@ -1176,8 +1184,8 @@ def list_shortcuts(
     lakehouse : str, default=None
         The Fabric lakehouse name.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    workspace : str, default=None
-        The name of the Fabric workspace in which lakehouse resides.
+    workspace : str | uuid.UUID, default=None
+        The name or ID of the Fabric workspace in which lakehouse resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 

@@ -1187,12 +1195,12 @@ def list_shortcuts(
         A pandas dataframe showing all the shortcuts which exist in the specified lakehouse.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
     else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
+        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace_id)
 
     client = fabric.FabricRestClient()
 

@@ -1309,17 +1317,17 @@ def list_capacities() -> pd.DataFrame:
 
 
 def list_reports_using_semantic_model(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows a list of all the reports (in all workspaces) which use a given semantic model.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 

@@ -1329,44 +1337,54 @@
         A pandas dataframe showing the reports which use a given semantic model.
     """
 
-    df = pd.DataFrame(
-        columns=[
-            "Report Name",
-            "Report Id",
-            "Report Workspace Name",
-            "Report Workspace Id",
-        ]
-    )
+    # df = pd.DataFrame(
+    #     columns=[
+    #         "Report Name",
+    #         "Report Id",
+    #         "Report Workspace Name",
+    #         "Report Workspace Id",
+    #     ]
+    # )
 
-    workspace = fabric.resolve_workspace_name(workspace)
-    dataset_id = resolve_dataset_id(dataset, workspace)
-    client = fabric.PowerBIRestClient()
-    response = client.get(
-        f"metadata/relations/downstream/dataset/{dataset_id}?apiVersion=3"
-    )
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    response_json = response.json()
+    dfR = fabric.list_reports(workspace=workspace_id)
+    dfR_filt = dfR[
+        (dfR["Dataset Id"] == dataset_id)
+        & (dfR["Dataset Workspace Id"] == workspace_id)
+    ][["Name", "Id"]]
+    dfR_filt.rename(columns={"Name": "Report Name", "Id": "Report Id"}, inplace=True)
+    dfR_filt["Report Worskpace Name"] = workspace_name
+    dfR_filt["Report Workspace Id"] = workspace_id
 
-    for i in response_json.get("artifacts", []):
-        object_workspace_id = i.get("workspace", {}).get("objectId")
-        object_type = i.get("typeName")
+    return dfR_filt
 
-        if object_type == "Report":
-            new_data = {
-                "Report Name": i.get("displayName"),
-                "Report Id": i.get("objectId"),
-                "Report Workspace Name": fabric.resolve_workspace_name(
-                    object_workspace_id
-                ),
-                "Report Workspace Id": object_workspace_id,
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    # client = fabric.PowerBIRestClient()
+    # response = client.get(
+    #     f"metadata/relations/downstream/dataset/{dataset_id}?apiVersion=3"
+    # )
 
-    return df
+    # response_json = response.json()
+
+    # for i in response_json.get("artifacts", []):
+    #     object_workspace_id = i.get("workspace", {}).get("objectId")
+    #     object_type = i.get("typeName")
+
+    #     if object_type == "Report":
+    #         new_data = {
+    #             "Report Name": i.get("displayName"),
+    #             "Report Id": i.get("objectId"),
+    #             "Report Workspace Name": fabric.resolve_workspace_name(
+    #                 object_workspace_id
+    #             ),
+    #             "Report Workspace Id": object_workspace_id,
+    #         }
+    #         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
 
 def list_report_semantic_model_objects(
-    dataset: str, workspace: Optional[str] = None, extended: bool = False
+    dataset: str | UUID, workspace: Optional[str | UUID] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a list of semantic model objects (i.e. columns, measures, hierarchies) used in all reports which feed data from
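
The one behavioral rewrite in this file: list_reports_using_semantic_model no longer queries the undocumented metadata/relations/downstream/dataset/{dataset_id}?apiVersion=3 endpoint; it now filters fabric.list_reports by the resolved dataset and workspace IDs, keeping the old path as commented-out code. Two details visible in the hunk itself: the new lookup is scoped to the dataset's own workspace even though the docstring still says "(in all workspaces)", and the "Report Worskpace Name" column name is misspelled as released. Usage is unchanged; the names below are hypothetical:

import sempy_labs as labs

# As of 0.9.0 the result comes from fabric.list_reports on the resolved workspace.
df = labs.list_reports_using_semantic_model(
    dataset="Sales Model", workspace="Analytics"
)
print(df[["Report Name", "Report Id"]])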
@@ -1376,10 +1394,10 @@ def list_report_semantic_model_objects(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     extended: bool, default=False

@@ -1408,8 +1426,11 @@
         ]
     )
 
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
     # Collect all reports which use the semantic model
-    dfR = list_reports_using_semantic_model(dataset=dataset, workspace=workspace)
+    dfR = list_reports_using_semantic_model(dataset=dataset_id, workspace=workspace_id)
 
     if len(dfR) == 0:
         return dfRO

@@ -1433,7 +1454,7 @@
     # Collect all semantic model objects
     if extended:
         with connect_semantic_model(
-            dataset=dataset, readonly=True, workspace=workspace
+            dataset=dataset_id, readonly=True, workspace=workspace_id
         ) as tom:
             for index, row in dfRO.iterrows():
                 object_type = row["Object Type"]

@@ -1458,8 +1479,8 @@
 
 
 def list_semantic_model_object_report_usage(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
     include_dependencies: bool = False,
     extended: bool = False,
 ) -> pd.DataFrame:

@@ -1470,10 +1491,10 @@
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     include_dependencies : bool, default=False

@@ -1491,9 +1512,10 @@
     from sempy_labs._model_dependencies import get_model_calc_dependencies
    from sempy_labs._helper_functions import format_dax_object_name
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    dfR = list_report_semantic_model_objects(dataset=dataset, workspace=workspace)
+    dfR = list_report_semantic_model_objects(dataset=dataset_id, workspace=workspace_id)
     usage_column_name = "Report Usage Count"
 
     if not include_dependencies:

@@ -1504,7 +1526,7 @@
         )
     else:
         df = pd.DataFrame(columns=["Table Name", "Object Name", "Object Type"])
-        dep = get_model_calc_dependencies(dataset=dataset, workspace=workspace)
+        dep = get_model_calc_dependencies(dataset=dataset_id, workspace=workspace_id)
 
     for i, r in dfR.iterrows():
         object_type = r["Object Type"]

@@ -1544,7 +1566,9 @@
         final_df["Object"] = format_dax_object_name(
             final_df["Table Name"], final_df["Object Name"]
         )
-        dfC = fabric.list_columns(dataset=dataset, workspace=workspace, extended=True)
+        dfC = fabric.list_columns(
+            dataset=dataset_id, workspace=workspace_id, extended=True
+        )
         dfC["Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])
         final_df = pd.merge(
             final_df,

@@ -1583,8 +1607,8 @@ def list_server_properties(workspace: Optional[str | UUID] = None) -> pd.DataFra
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 

@@ -1626,9 +1650,9 @@ def list_semantic_model_errors(
 
     Parameters
     ----------
-    dataset : str | UUID
+    dataset : str | uuid.UUID
         Name or ID of the semantic model.
-    workspace : str | UUID, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
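
The remaining hunks apply the same str | uuid.UUID docstring convention across the module. A closing sketch of the widened call surface, assuming these functions remain exported from the package root; the workspace UUID and model name are hypothetical:

from uuid import UUID
import sempy_labs as labs

ws = UUID("11111111-2222-3333-4444-555555555555")  # hypothetical workspace ID

# Workspace-scoped listings now take the UUID directly.
labs.list_lakehouses(workspace=ws)
labs.list_sql_endpoints(workspace=ws)
labs.list_semantic_model_errors(dataset="Sales Model", workspace=ws)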