semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of semantic-link-labs has been flagged as possibly problematic.

Files changed (76)
  1. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
  2. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
  3. sempy_labs/__init__.py +14 -2
  4. sempy_labs/_authentication.py +31 -2
  5. sempy_labs/_clear_cache.py +39 -37
  6. sempy_labs/_connections.py +13 -13
  7. sempy_labs/_data_pipelines.py +20 -20
  8. sempy_labs/_dataflows.py +27 -28
  9. sempy_labs/_dax.py +41 -47
  10. sempy_labs/_environments.py +26 -23
  11. sempy_labs/_eventhouses.py +16 -15
  12. sempy_labs/_eventstreams.py +16 -15
  13. sempy_labs/_external_data_shares.py +18 -20
  14. sempy_labs/_gateways.py +57 -11
  15. sempy_labs/_generate_semantic_model.py +100 -71
  16. sempy_labs/_git.py +134 -67
  17. sempy_labs/_helper_functions.py +199 -145
  18. sempy_labs/_job_scheduler.py +92 -0
  19. sempy_labs/_kql_databases.py +16 -15
  20. sempy_labs/_kql_querysets.py +16 -15
  21. sempy_labs/_list_functions.py +281 -120
  22. sempy_labs/_managed_private_endpoints.py +19 -17
  23. sempy_labs/_mirrored_databases.py +51 -48
  24. sempy_labs/_mirrored_warehouses.py +5 -4
  25. sempy_labs/_ml_experiments.py +16 -15
  26. sempy_labs/_ml_models.py +15 -14
  27. sempy_labs/_model_bpa.py +27 -25
  28. sempy_labs/_model_bpa_bulk.py +3 -3
  29. sempy_labs/_model_dependencies.py +60 -28
  30. sempy_labs/_notebooks.py +73 -39
  31. sempy_labs/_one_lake_integration.py +23 -26
  32. sempy_labs/_query_scale_out.py +67 -64
  33. sempy_labs/_refresh_semantic_model.py +47 -42
  34. sempy_labs/_spark.py +33 -32
  35. sempy_labs/_sql.py +12 -9
  36. sempy_labs/_translations.py +10 -7
  37. sempy_labs/_vertipaq.py +34 -31
  38. sempy_labs/_warehouses.py +22 -21
  39. sempy_labs/_workspace_identity.py +11 -10
  40. sempy_labs/_workspaces.py +40 -33
  41. sempy_labs/admin/__init__.py +4 -0
  42. sempy_labs/admin/_basic_functions.py +44 -12
  43. sempy_labs/admin/_external_data_share.py +3 -3
  44. sempy_labs/admin/_items.py +4 -4
  45. sempy_labs/admin/_scanner.py +7 -5
  46. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  47. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  48. sempy_labs/directlake/_dl_helper.py +36 -32
  49. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  51. sempy_labs/directlake/_get_shared_expression.py +4 -3
  52. sempy_labs/directlake/_guardrails.py +12 -6
  53. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  54. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  55. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  56. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  57. sempy_labs/directlake/_warm_cache.py +87 -65
  58. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  59. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  60. sempy_labs/lakehouse/_lakehouse.py +17 -13
  61. sempy_labs/lakehouse/_shortcuts.py +42 -23
  62. sempy_labs/migration/_create_pqt_file.py +16 -11
  63. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  64. sempy_labs/report/_download_report.py +9 -8
  65. sempy_labs/report/_generate_report.py +40 -44
  66. sempy_labs/report/_paginated.py +9 -9
  67. sempy_labs/report/_report_bpa.py +13 -9
  68. sempy_labs/report/_report_functions.py +80 -91
  69. sempy_labs/report/_report_helper.py +8 -4
  70. sempy_labs/report/_report_list_functions.py +24 -13
  71. sempy_labs/report/_report_rebind.py +17 -16
  72. sempy_labs/report/_reportwrapper.py +41 -33
  73. sempy_labs/tom/_model.py +117 -38
  74. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  75. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  76. {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
sempy_labs/admin/_basic_functions.py

@@ -27,9 +27,9 @@ def list_workspaces(
 
     Parameters
     ----------
-    capacity : str | UUID, default=None
+    capacity : str | uuid.UUID, default=None
        Returns only the workspaces in the specified Capacity.
-    workspace : str | UUID, default=None
+    workspace : str | uuid.UUID, default=None
        Returns the workspace with the specific name.
    workspace_state : str, default=None
        Return only the workspace with the requested state. You can find the possible states in `Workspace States <https://learn.microsoft.com/en-us/rest/api/fabric/admin/workspaces/list-workspaces?tabs=HTTP#workspacestate>`_.
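
For context, a minimal usage sketch of the widened filter types (values are illustrative; assumes list_workspaces remains exported from sempy_labs.admin as in prior releases):

    import sempy_labs.admin as admin
    from uuid import UUID

    # A capacity or workspace can now be identified by UUID as well as by name.
    df = admin.list_workspaces(capacity="My Capacity")  # hypothetical name
    df = admin.list_workspaces(
        workspace=UUID("11111111-1111-1111-1111-111111111111")  # placeholder id
    )
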
@@ -131,7 +131,7 @@ def list_capacities(
 
     Parameters
     ----------
-    capacity : str | UUID, default=None
+    capacity : str | uuid.UUID, default=None
        Capacity name or id to filter.
 
    Returns
@@ -185,11 +185,11 @@ def assign_workspaces_to_capacity(
 
     Parameters
     ----------
-    source_capacity : str | UUID, default=None
+    source_capacity : str | uuid.UUID, default=None
        The name of the source capacity. If the Workspace is not specified, this is parameter mandatory.
-    target_capacity : str | UUID, default=None
+    target_capacity : str | uuid.UUID, default=None
        The name of the target capacity.
-    workspace : str | List[str] | UUID | List[UUID], default=None
+    workspace : str | List[str] | uuid.UUID | List[uuid.UUID], default=None
        The name or id of the workspace(s).
        Defaults to None which resolves to migrating all workspaces within the source capacity to the target capacity.
    """
@@ -274,7 +274,7 @@ def unassign_workspaces_from_capacity(
 
     Parameters
     ----------
-    workspaces : str | List[str] | UUID | List[UUID]
+    workspaces : str | List[str] | uuid.UUID | List[uuid.UUID]
        The Fabric workspace name(s) or id(s).
    """
    if isinstance(workspaces, str):
@@ -512,8 +512,6 @@ def list_datasets(
        Returns a subset of a results based on Odata filter query parameter condition.
    skip : int, default=None
        Skips the first n results.
-    token_provider : Optional[TokenProvider] = None,
-        Authentication provider used to be use in the request. Supports Service Principal.
 
    Returns
    -------
@@ -673,7 +671,7 @@ def list_workspace_access_details(
 
     Parameters
     ----------
-    workspace : str | UUID, default=None
+    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or id.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -721,7 +719,7 @@ def list_activity_events(
    end_time: str,
    activity_filter: Optional[str] = None,
    user_id_filter: Optional[str] = None,
-    return_dataframe: Optional[bool] = True,
+    return_dataframe: bool = True,
 ) -> pd.DataFrame | dict:
    """
    Shows a list of audit activity events for a tenant.
@@ -777,6 +775,23 @@ def list_activity_events(
            "Object Display Name",
            "Experience",
            "Refresh Enforcement Policy",
+            "Is Success",
+            "Activity Id",
+            "Item Name",
+            "Dataset Name",
+            "Report Name",
+            "Capacity Id",
+            "Capacity Name",
+            "App Name",
+            "Dataset Id",
+            "Report Id",
+            "Artifact Id",
+            "Artifact Name",
+            "Report Type",
+            "App Report Id",
+            "Distribution Method",
+            "Consumption Method",
+            "Artifact Kind",
        ]
    )
 
@@ -825,6 +840,23 @@ def list_activity_events(
            "Object Display Name": i.get("ObjectDisplayName"),
            "Experience": i.get("Experience"),
            "Refresh Enforcement Policy": i.get("RefreshEnforcementPolicy"),
+            "Is Success": i.get("IsSuccess"),
+            "Activity Id": i.get("ActivityId"),
+            "Item Name": i.get("ItemName"),
+            "Dataset Name": i.get("DatasetName"),
+            "Report Name": i.get("ReportName"),
+            "Capacity Id": i.get("CapacityId"),
+            "Capacity Name": i.get("CapacityName"),
+            "App Name": i.get("AppName"),
+            "Dataset Id": i.get("DatasetId"),
+            "Report Id": i.get("ReportId"),
+            "Artifact Id": i.get("ArtifactId"),
+            "Artifact Name": i.get("ArtifactName"),
+            "Report Type": i.get("ReportType"),
+            "App Report Id": i.get("AppReportId"),
+            "Distribution Method": i.get("DistributionMethod"),
+            "Consumption Method": i.get("ConsumptionMethod"),
+            "Artifact Kind": i.get("ArtifactKind"),
        }
        df = pd.concat(
            [df, pd.DataFrame(new_data, index=[0])],
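
A sketch of reading the enriched output (timestamps are illustrative; assumes list_activity_events remains exported from sempy_labs.admin):

    import sempy_labs.admin as admin

    # return_dataframe=True (now a plain bool) yields a DataFrame that includes
    # the columns populated above, e.g. "Is Success" and "Capacity Name".
    df = admin.list_activity_events(
        start_time="2025-01-01T00:00:00",
        end_time="2025-01-01T23:59:59",
    )
    print(df[["Activity Id", "Is Success", "Capacity Name"]].head())
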
@@ -1003,7 +1035,7 @@ def get_capacity_assignment_status(workspace: Optional[str | UUID] = None):
 
     Parameters
     ----------
-    workspace : str | UUID, default=None
+    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or id.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
sempy_labs/admin/_external_data_share.py

@@ -75,11 +75,11 @@ def revoke_external_data_share(
 
     Parameters
     ----------
-    external_data_share_id : UUID
+    external_data_share_id : uuid.UUID
        The external data share ID.
-    item_id : int, default=None
+    item_id : uuid.UUID, default=None
        The Item ID
-    workspace : str
+    workspace : str | uuid.UUID
        The Fabric workspace name or id.
    """
    (workspace, workspace_id) = _resolve_workspace_name_and_id(workspace)
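
A sketch of the corrected signature in use (all ids are placeholders; assumes the function is exported from sempy_labs.admin):

    import sempy_labs.admin as admin
    from uuid import UUID

    # item_id is now documented as a UUID (previously mis-documented as int),
    # and workspace accepts a name or an id.
    admin.revoke_external_data_share(
        external_data_share_id=UUID("11111111-1111-1111-1111-111111111111"),
        item_id=UUID("22222222-2222-2222-2222-222222222222"),
        workspace="Sales Workspace",  # hypothetical name
    )
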
sempy_labs/admin/_items.py

@@ -65,7 +65,7 @@ def _resolve_item_name_and_id(
 
 def list_items(
    capacity: Optional[str | UUID] = None,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
    state: Optional[str] = None,
    type: Optional[str] = None,
    item: Optional[str | UUID] = None,
@@ -78,9 +78,9 @@ def list_items(
 
     Parameters
     ----------
-    capacity : str | UUID, default=None
+    capacity : str | uuid.UUID, default=None
        The capacity name or id.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -195,7 +195,7 @@ def list_item_access_details(
        Name or id of the Fabric item.
    type : str, default=None
        Type of Fabric item.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or id.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
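
A sketch of the widened workspace parameter (the id and item type are hypothetical; assumes list_items remains exported from sempy_labs.admin):

    import sempy_labs.admin as admin

    # The workspace can now be given by id rather than only by name.
    df = admin.list_items(
        workspace="11111111-1111-1111-1111-111111111111",
        type="Report",
    )
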
sempy_labs/admin/_scanner.py

@@ -5,8 +5,10 @@ from sempy.fabric.exceptions import FabricHTTPException
 import numpy as np
 import time
 from sempy_labs.admin._basic_functions import list_workspaces
+from sempy._utils._log import log
 
 
+@log
 def scan_workspaces(
    data_source_details: bool = False,
    dataset_schema: bool = False,
@@ -16,12 +18,12 @@ def scan_workspaces(
    workspace: Optional[str | List[str] | UUID | List[UUID]] = None,
 ) -> dict:
    """
-    Get the inventory and details of the tenant.
+    Gets the scan result for the specified scan.
 
    This is a wrapper function for the following APIs:
-    `Admin - WorkspaceInfo PostWorkspaceInfo <https://learn.microsoft.com/en-gb/rest/api/power-bi/admin/workspace-info-post-workspace-info>`_.
-    `Admin - WorkspaceInfo GetScanStatus <https://learn.microsoft.com/en-gb/rest/api/power-bi/admin/workspace-info-get-scan-status>`_.
-    `Admin - WorkspaceInfo GetScanResult <https://learn.microsoft.com/en-gb/rest/api/power-bi/admin/workspace-info-get-scan-result>`_.
+    `Admin - WorkspaceInfo PostWorkspaceInfo <https://learn.microsoft.com/rest/api/power-bi/admin/workspace-info-post-workspace-info>`_.
+    `Admin - WorkspaceInfo GetScanStatus <https://learn.microsoft.com/rest/api/power-bi/admin/workspace-info-get-scan-status>`_.
+    `Admin - WorkspaceInfo GetScanResult <https://learn.microsoft.com/rest/api/power-bi/admin/workspace-info-get-scan-result>`_.
 
    Parameters
    ----------
@@ -35,7 +37,7 @@ def scan_workspaces(
        Whether to return lineage info (upstream dataflows, tiles, data source IDs)
    artifact_users : bool, default=False
        Whether to return user details for a Power BI item (such as a report or a dashboard)
-    workspace : str | List[str] | UUID | List[UUID], default=None
+    workspace : str | List[str] | uuid.UUID | List[uuid.UUID], default=None
        The required workspace name(s) or id(s) to be scanned
 
    Returns
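
A sketch of the decorated function in use (workspace names are hypothetical; assumes scan_workspaces remains exported from sempy_labs.admin):

    import sempy_labs.admin as admin

    # Returns the scan-result dict for the requested workspaces; the new @log
    # decorator only adds sempy's standard call logging.
    result = admin.scan_workspaces(
        data_source_details=True,
        artifact_users=True,
        workspace=["Sales Workspace", "Finance Workspace"],
    )
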
sempy_labs/directlake/_directlake_schema_compare.py

@@ -2,6 +2,8 @@ import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import (
    format_dax_object_name,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from IPython.display import display
 from sempy_labs.lakehouse import get_lakehouse_columns
@@ -9,12 +11,13 @@ from sempy_labs.directlake._dl_helper import get_direct_lake_source
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy._utils._log import log
+from uuid import UUID
 
 
 @log
 def direct_lake_schema_compare(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
    **kwargs,
 ):
    """
@@ -22,10 +25,10 @@ def direct_lake_schema_compare(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """
@@ -41,10 +44,11 @@ def direct_lake_schema_compare(
    )
    del kwargs["lakehouse_workspace"]
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
    artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
-        get_direct_lake_source(dataset=dataset, workspace=workspace)
+        get_direct_lake_source(dataset=dataset_id, workspace=workspace_id)
    )
    lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)
 
@@ -53,15 +57,15 @@ def direct_lake_schema_compare(
            f"{icons.red_dot} This function is only valid for Direct Lake semantic models which source from Fabric lakehouses (not warehouses)."
        )
 
-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+    dfP = fabric.list_partitions(dataset=dataset_id, workspace=workspace_id)
 
    if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake mode."
+            f"{icons.red_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is not in Direct Lake mode."
        )
 
-    dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
-    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+    dfT = fabric.list_tables(dataset=dataset_id, workspace=workspace_id)
+    dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
    lc = get_lakehouse_columns(lakehouse_name, lakehouse_workspace)
 
    dfT.rename(columns={"Type": "Table Type"}, inplace=True)
@@ -92,7 +96,7 @@ def direct_lake_schema_compare(
        )
    else:
        print(
-            f"{icons.yellow_dot} The following tables exist in the '{dataset}' semantic model within the '{workspace}' workspace"
+            f"{icons.yellow_dot} The following tables exist in the '{dataset_name}' semantic model within the '{workspace_name}' workspace"
            f" but do not exist in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
        )
        display(missingtbls)
@@ -102,7 +106,7 @@ def direct_lake_schema_compare(
        )
    else:
        print(
-            f"{icons.yellow_dot} The following columns exist in the '{dataset}' semantic model within the '{workspace}' workspace "
+            f"{icons.yellow_dot} The following columns exist in the '{dataset_name}' semantic model within the '{workspace_name}' workspace "
            f"but do not exist in the '{lakehouse_name}' lakehouse within the '{lakehouse_workspace}' workspace."
        )
        display(missingcols)
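
A sketch of the new id-based addressing (the id and name are placeholders; assumes the function is exported from sempy_labs.directlake):

    from sempy_labs.directlake import direct_lake_schema_compare

    # The model and the workspace can now each be passed as a UUID or a name.
    direct_lake_schema_compare(
        dataset="11111111-1111-1111-1111-111111111111",  # placeholder model id
        workspace="Sales Workspace",                     # hypothetical name
    )
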
sempy_labs/directlake/_directlake_schema_sync.py

@@ -3,16 +3,21 @@ import sempy.fabric as fabric
 from sempy_labs.lakehouse import get_lakehouse_columns
 from sempy_labs.directlake._dl_helper import get_direct_lake_source
 from sempy_labs.tom import connect_semantic_model
-from sempy_labs._helper_functions import _convert_data_type
+from sempy_labs._helper_functions import (
+    _convert_data_type,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
+)
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID
 
 
 @log
 def direct_lake_schema_sync(
-    dataset: str,
-    workspace: Optional[str] = None,
+    dataset: str | UUID,
+    workspace: Optional[str | UUID] = None,
    add_to_model: bool = False,
    **kwargs,
 ):
@@ -21,10 +26,10 @@ def direct_lake_schema_sync(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    add_to_model : bool, default=False
@@ -45,10 +50,11 @@ def direct_lake_schema_sync(
    )
    del kwargs["lakehouse_workspace"]
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
    artifact_type, lakehouse_name, lakehouse_id, lakehouse_workspace_id = (
-        get_direct_lake_source(dataset=dataset, workspace=workspace)
+        get_direct_lake_source(dataset=dataset_id, workspace=workspace_id)
    )
 
    if artifact_type == "Warehouse":
@@ -60,7 +66,7 @@ def direct_lake_schema_sync(
    lc = get_lakehouse_columns(lakehouse_name, lakehouse_workspace)
 
    with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=workspace
+        dataset=dataset_id, readonly=False, workspace=workspace_id
    ) as tom:
 
        for i, r in lc.iterrows():
@@ -86,7 +92,7 @@ def direct_lake_schema_sync(
                for c in tom.all_columns()
            ):
                print(
-                    f"{icons.yellow_dot} The '{lakeCName}' column exists in the '{lakeTName}' lakehouse table but not in the '{dataset}' semantic model within the '{workspace}' workspace."
+                    f"{icons.yellow_dot} The '{lakeCName}' column exists in the '{lakeTName}' lakehouse table but not in the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
                )
                if add_to_model:
                    dt = _convert_data_type(dType)
@@ -97,5 +103,5 @@ def direct_lake_schema_sync(
                        data_type=dt,
                    )
                    print(
-                        f"{icons.green_dot} The '{lakeCName}' column in the '{lakeTName}' lakehouse table was added to the '{dataset}' semantic model within the '{workspace}' workspace."
+                        f"{icons.green_dot} The '{lakeCName}' column in the '{lakeTName}' lakehouse table was added to the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
                    )
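
A sketch under the same assumptions as above (exported from sempy_labs.directlake; the model name is hypothetical):

    from sempy_labs.directlake import direct_lake_schema_sync

    # Adds lakehouse columns that are missing from the model; dataset and
    # workspace may now be ids as well as names.
    direct_lake_schema_sync(dataset="Sales Model", add_to_model=True)
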
sempy_labs/directlake/_dl_helper.py

@@ -7,24 +7,25 @@ import sempy_labs._icons as icons
 from sempy._utils._log import log
 from sempy_labs._helper_functions import (
    retry,
-    resolve_dataset_id,
    resolve_lakehouse_name,
    _convert_data_type,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
 )
 
 
 def check_fallback_reason(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
    """
    Shows the reason a table in a Direct Lake semantic model would fallback to DirectQuery.
 
    Parameters
    ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -35,19 +36,22 @@ def check_fallback_reason(
    """
    from sempy_labs.tom import connect_semantic_model
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+        dataset, workspace=workspace_id
+    )
 
    with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=True
+        dataset=dataset_id, workspace=workspace_id, readonly=True
    ) as tom:
        if not tom.is_direct_lake():
            raise ValueError(
-                f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+                f"{icons.red_dot} The '{dataset_name}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
            )
 
    df = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
        dax_string="""
        SELECT [TableName] AS [Table Name],[FallbackReason] AS [FallbackReasonID]
        FROM $SYSTEM.TMSCHEMA_DELTA_TABLE_METADATA_STORAGES
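
A sketch of the updated signature (the model name is hypothetical; assumes check_fallback_reason is exported from sempy_labs.directlake):

    from sempy_labs.directlake import check_fallback_reason

    # One row per table with its DirectQuery fallback reason; raises a
    # ValueError if the model is not Direct Lake.
    df = check_fallback_reason(dataset="Sales Model")
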
@@ -76,9 +80,9 @@
 def generate_direct_lake_semantic_model(
    dataset: str,
    lakehouse_tables: Union[str, List[str]],
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
    lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
    schema: str = "dbo",
    overwrite: bool = False,
    refresh: bool = True,
@@ -92,15 +96,15 @@ def generate_direct_lake_semantic_model(
        Name of the semantic model to be created.
    lakehouse_tables : str | List[str]
        The table(s) within the Fabric lakehouse to add to the semantic model. All columns from these tables will be added to the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name in which the semantic model will reside.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the semantic model will reside.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    lakehouse : str, default=None
        The lakehouse which stores the delta tables which will feed the Direct Lake semantic model.
        Defaults to None which resolves to the attached lakehouse.
-    lakehouse_workspace : str, default=None
-        The Fabric workspace in which the lakehouse resides.
+    lakehouse_workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the lakehouse resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    schema : str, default="dbo"
@@ -122,7 +126,7 @@ def generate_direct_lake_semantic_model(
    if isinstance(lakehouse_tables, str):
        lakehouse_tables = [lakehouse_tables]
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    if lakehouse_workspace is None:
        lakehouse_workspace = workspace
    if lakehouse is None:
@@ -139,23 +143,23 @@ def generate_direct_lake_semantic_model(
    for t in lakehouse_tables:
        if t not in dfLT["Table Name"].values:
            raise ValueError(
-                f"{icons.red_dot} The '{t}' table does not exist as a delta table in the '{lakehouse}' within the '{workspace}' workspace."
+                f"{icons.red_dot} The '{t}' table does not exist as a delta table in the '{lakehouse}' within the '{workspace_name}' workspace."
            )
 
    dfLC = get_lakehouse_columns(lakehouse=lakehouse, workspace=lakehouse_workspace)
    expr = generate_shared_expression(
        item_name=lakehouse, item_type="Lakehouse", workspace=lakehouse_workspace
    )
-    dfD = fabric.list_datasets(workspace=workspace)
+    dfD = fabric.list_datasets(workspace=workspace_id)
    dfD_filt = dfD[dfD["Dataset Name"] == dataset]
 
    if len(dfD_filt) > 0 and not overwrite:
        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace already exists. Overwrite is set to False so the new semantic model has not been created."
+            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace_name}' workspace already exists. Overwrite is set to False so the new semantic model has not been created."
        )
 
    create_blank_semantic_model(
-        dataset=dataset, workspace=workspace, overwrite=overwrite
+        dataset=dataset, workspace=workspace_id, overwrite=overwrite
    )
 
@@ -164,7 +168,7 @@ def generate_direct_lake_semantic_model(
    )
    def dyn_connect():
        with connect_semantic_model(
-            dataset=dataset, readonly=True, workspace=workspace
+            dataset=dataset, readonly=True, workspace=workspace_id
        ) as tom:
 
            tom.model
@@ -173,7 +177,7 @@ def generate_direct_lake_semantic_model(
 
    expression_name = "DatabaseQuery"
    with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=False
+        dataset=dataset, workspace=workspace_id, readonly=False
    ) as tom:
        if not any(e.Name == expression_name for e in tom.model.Expressions):
            tom.add_expression(name=expression_name, expression=expr)
@@ -194,21 +198,21 @@ def generate_direct_lake_semantic_model(
    )
 
    if refresh:
-        refresh_semantic_model(dataset=dataset, workspace=workspace)
+        refresh_semantic_model(dataset=dataset, workspace=workspace_id)
 
 
 def get_direct_lake_source(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> Tuple[str, str, UUID, UUID]:
    """
    Obtains the source information for a direct lake semantic model.
 
    Parameters
    ----------
-    dataset : str
-        The name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        The name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -220,8 +224,8 @@ def get_direct_lake_source(
    If the semantic model is not a Direct Lake semantic model, it will return None, None, None.
    """
 
-    workspace = fabric.resolve_workspace_name(workspace)
-    dataset_id = resolve_dataset_id(dataset, workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
    client = fabric.PowerBIRestClient()
    request_body = {
        "artifacts": [
sempy_labs/directlake/_generate_shared_expression.py

@@ -3,16 +3,18 @@ from sempy_labs._helper_functions import (
    resolve_lakehouse_name,
    resolve_lakehouse_id,
    resolve_warehouse_id,
+    resolve_workspace_name_and_id,
 )
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
 def generate_shared_expression(
    item_name: Optional[str] = None,
    item_type: str = "Lakehouse",
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> str:
    """
    Dynamically generates the M expression used by a Direct Lake model for a given lakehouse/warehouse.
@@ -24,8 +26,8 @@ def generate_shared_expression(
        Defaults to None which resolves to the lakehouse attached to the notebook.
    item_type : str, default="Lakehouse"
        The Fabric item name. Valid options: 'Lakehouse', 'Warehouse'.
-    workspace : str, default=None
-        The Fabric workspace used by the item.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the item.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -35,8 +37,7 @@ def generate_shared_expression(
        Shows the expression which can be used to connect a Direct Lake semantic model to its SQL Endpoint.
    """
 
-    workspace = fabric.resolve_workspace_name(workspace)
-    workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    item_types = ["Lakehouse", "Warehouse"]
    item_type = item_type.capitalize()
    if item_type not in item_types:
@@ -46,11 +47,11 @@ def generate_shared_expression(
 
    if item_name is None:
        item_id = fabric.get_lakehouse_id()
-        item_name = resolve_lakehouse_name(item_id, workspace)
+        item_name = resolve_lakehouse_name(item_id, workspace_id)
    elif item_name is not None and item_type == "Lakehouse":
-        item_id = resolve_lakehouse_id(lakehouse=item_name, workspace=workspace)
+        item_id = resolve_lakehouse_id(lakehouse=item_name, workspace=workspace_id)
    elif item_type == "Warehouse":
-        item_id = resolve_warehouse_id(warehouse=item_name, workspace=workspace)
+        item_id = resolve_warehouse_id(warehouse=item_name, workspace=workspace_id)
 
    client = fabric.FabricRestClient()
    item_type_rest = f"{item_type.lower()}s"
@@ -72,7 +73,7 @@ def generate_shared_expression(
 
    if provStatus == "InProgress":
        raise ValueError(
-            f"{icons.red_dot} The SQL Endpoint for the '{item_name}' lakehouse within the '{workspace}' workspace has not yet been provisioned. Please wait until it has been provisioned."
+            f"{icons.red_dot} The SQL Endpoint for the '{item_name}' lakehouse within the '{workspace_name}' workspace has not yet been provisioned. Please wait until it has been provisioned."
        )
 
    start_expr = "let\n\tdatabase = "
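
A sketch of the widened workspace parameter (the item name and id are hypothetical; assumes generate_shared_expression is exported from sempy_labs.directlake):

    from sempy_labs.directlake import generate_shared_expression

    # The M expression connecting a Direct Lake model to the item's SQL endpoint;
    # the workspace may now be passed as an id.
    expr = generate_shared_expression(
        item_name="SalesWarehouse",
        item_type="Warehouse",
        workspace="11111111-1111-1111-1111-111111111111",
    )
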