semantic-link-labs 0.8.10__py3-none-any.whl → 0.8.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs has been flagged as potentially problematic; consult the registry's advisory page for more details.

Files changed (73)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +3 -2
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +73 -72
  3. sempy_labs/__init__.py +6 -2
  4. sempy_labs/_clear_cache.py +39 -37
  5. sempy_labs/_connections.py +13 -13
  6. sempy_labs/_data_pipelines.py +20 -20
  7. sempy_labs/_dataflows.py +27 -28
  8. sempy_labs/_dax.py +41 -47
  9. sempy_labs/_environments.py +26 -23
  10. sempy_labs/_eventhouses.py +16 -15
  11. sempy_labs/_eventstreams.py +16 -15
  12. sempy_labs/_external_data_shares.py +18 -20
  13. sempy_labs/_gateways.py +14 -14
  14. sempy_labs/_generate_semantic_model.py +99 -62
  15. sempy_labs/_git.py +105 -43
  16. sempy_labs/_helper_functions.py +148 -131
  17. sempy_labs/_job_scheduler.py +92 -0
  18. sempy_labs/_kql_databases.py +16 -15
  19. sempy_labs/_kql_querysets.py +16 -15
  20. sempy_labs/_list_functions.py +114 -99
  21. sempy_labs/_managed_private_endpoints.py +19 -17
  22. sempy_labs/_mirrored_databases.py +51 -48
  23. sempy_labs/_mirrored_warehouses.py +5 -4
  24. sempy_labs/_ml_experiments.py +16 -15
  25. sempy_labs/_ml_models.py +15 -14
  26. sempy_labs/_model_bpa.py +3 -3
  27. sempy_labs/_model_dependencies.py +55 -29
  28. sempy_labs/_notebooks.py +27 -25
  29. sempy_labs/_one_lake_integration.py +23 -26
  30. sempy_labs/_query_scale_out.py +67 -64
  31. sempy_labs/_refresh_semantic_model.py +25 -26
  32. sempy_labs/_spark.py +33 -32
  33. sempy_labs/_sql.py +12 -9
  34. sempy_labs/_translations.py +10 -7
  35. sempy_labs/_vertipaq.py +34 -31
  36. sempy_labs/_warehouses.py +22 -21
  37. sempy_labs/_workspace_identity.py +11 -10
  38. sempy_labs/_workspaces.py +40 -33
  39. sempy_labs/admin/_basic_functions.py +10 -12
  40. sempy_labs/admin/_external_data_share.py +3 -3
  41. sempy_labs/admin/_items.py +4 -4
  42. sempy_labs/admin/_scanner.py +3 -1
  43. sempy_labs/directlake/_directlake_schema_compare.py +18 -14
  44. sempy_labs/directlake/_directlake_schema_sync.py +18 -12
  45. sempy_labs/directlake/_dl_helper.py +25 -26
  46. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  47. sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
  48. sempy_labs/directlake/_get_shared_expression.py +4 -3
  49. sempy_labs/directlake/_guardrails.py +12 -6
  50. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  51. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  52. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
  54. sempy_labs/directlake/_warm_cache.py +87 -65
  55. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
  56. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
  57. sempy_labs/lakehouse/_lakehouse.py +17 -13
  58. sempy_labs/lakehouse/_shortcuts.py +42 -23
  59. sempy_labs/migration/_create_pqt_file.py +16 -11
  60. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  61. sempy_labs/report/_download_report.py +9 -8
  62. sempy_labs/report/_generate_report.py +40 -44
  63. sempy_labs/report/_paginated.py +9 -9
  64. sempy_labs/report/_report_bpa.py +13 -9
  65. sempy_labs/report/_report_functions.py +80 -91
  66. sempy_labs/report/_report_helper.py +8 -4
  67. sempy_labs/report/_report_list_functions.py +24 -13
  68. sempy_labs/report/_report_rebind.py +17 -16
  69. sempy_labs/report/_reportwrapper.py +41 -33
  70. sempy_labs/tom/_model.py +43 -6
  71. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
  72. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
  73. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
@@ -7,7 +7,6 @@ import sempy_labs._icons as icons
7
7
  from sempy._utils._log import log
8
8
  from sempy_labs._helper_functions import (
9
9
  retry,
10
- resolve_dataset_id,
11
10
  resolve_lakehouse_name,
12
11
  _convert_data_type,
13
12
  resolve_dataset_name_and_id,
@@ -16,17 +15,17 @@ from sempy_labs._helper_functions import (
16
15
 
17
16
 
18
17
  def check_fallback_reason(
19
- dataset: str | UUID, workspace: Optional[str] = None
18
+ dataset: str | UUID, workspace: Optional[str | UUID] = None
20
19
  ) -> pd.DataFrame:
21
20
  """
22
21
  Shows the reason a table in a Direct Lake semantic model would fallback to DirectQuery.
23
22
 
24
23
  Parameters
25
24
  ----------
26
- dataset : str | UUID
25
+ dataset : str | uuid.UUID
27
26
  Name or ID of the semantic model.
28
- workspace : str, default=None
29
- The Fabric workspace name.
27
+ workspace : str | uuid.UUID, default=None
28
+ The Fabric workspace name or ID.
30
29
  Defaults to None which resolves to the workspace of the attached lakehouse
31
30
  or if no lakehouse attached, resolves to the workspace of the notebook.
32
31
 
@@ -81,9 +80,9 @@ def check_fallback_reason(
81
80
  def generate_direct_lake_semantic_model(
82
81
  dataset: str,
83
82
  lakehouse_tables: Union[str, List[str]],
84
- workspace: Optional[str] = None,
83
+ workspace: Optional[str | UUID] = None,
85
84
  lakehouse: Optional[str] = None,
86
- lakehouse_workspace: Optional[str] = None,
85
+ lakehouse_workspace: Optional[str | UUID] = None,
87
86
  schema: str = "dbo",
88
87
  overwrite: bool = False,
89
88
  refresh: bool = True,
@@ -97,15 +96,15 @@ def generate_direct_lake_semantic_model(
97
96
  Name of the semantic model to be created.
98
97
  lakehouse_tables : str | List[str]
99
98
  The table(s) within the Fabric lakehouse to add to the semantic model. All columns from these tables will be added to the semantic model.
100
- workspace : str, default=None
101
- The Fabric workspace name in which the semantic model will reside.
99
+ workspace : str | uuid.UUID, default=None
100
+ The Fabric workspace name or ID in which the semantic model will reside.
102
101
  Defaults to None which resolves to the workspace of the attached lakehouse
103
102
  or if no lakehouse attached, resolves to the workspace of the notebook.
104
103
  lakehouse : str, default=None
105
104
  The lakehouse which stores the delta tables which will feed the Direct Lake semantic model.
106
105
  Defaults to None which resolves to the attached lakehouse.
107
- lakehouse_workspace : str, default=None
108
- The Fabric workspace in which the lakehouse resides.
106
+ lakehouse_workspace : str | uuid.UUID, default=None
107
+ The Fabric workspace name or ID in which the lakehouse resides.
109
108
  Defaults to None which resolves to the workspace of the attached lakehouse
110
109
  or if no lakehouse attached, resolves to the workspace of the notebook.
111
110
  schema : str, default="dbo"
@@ -127,7 +126,7 @@ def generate_direct_lake_semantic_model(
127
126
  if isinstance(lakehouse_tables, str):
128
127
  lakehouse_tables = [lakehouse_tables]
129
128
 
130
- workspace = fabric.resolve_workspace_name(workspace)
129
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
131
130
  if lakehouse_workspace is None:
132
131
  lakehouse_workspace = workspace
133
132
  if lakehouse is None:
@@ -144,23 +143,23 @@ def generate_direct_lake_semantic_model(
144
143
  for t in lakehouse_tables:
145
144
  if t not in dfLT["Table Name"].values:
146
145
  raise ValueError(
147
- f"{icons.red_dot} The '{t}' table does not exist as a delta table in the '{lakehouse}' within the '{workspace}' workspace."
146
+ f"{icons.red_dot} The '{t}' table does not exist as a delta table in the '{lakehouse}' within the '{workspace_name}' workspace."
148
147
  )
149
148
 
150
149
  dfLC = get_lakehouse_columns(lakehouse=lakehouse, workspace=lakehouse_workspace)
151
150
  expr = generate_shared_expression(
152
151
  item_name=lakehouse, item_type="Lakehouse", workspace=lakehouse_workspace
153
152
  )
154
- dfD = fabric.list_datasets(workspace=workspace)
153
+ dfD = fabric.list_datasets(workspace=workspace_id)
155
154
  dfD_filt = dfD[dfD["Dataset Name"] == dataset]
156
155
 
157
156
  if len(dfD_filt) > 0 and not overwrite:
158
157
  raise ValueError(
159
- f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace already exists. Overwrite is set to False so the new semantic model has not been created."
158
+ f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace_name}' workspace already exists. Overwrite is set to False so the new semantic model has not been created."
160
159
  )
161
160
 
162
161
  create_blank_semantic_model(
163
- dataset=dataset, workspace=workspace, overwrite=overwrite
162
+ dataset=dataset, workspace=workspace_id, overwrite=overwrite
164
163
  )
165
164
 
166
165
  @retry(
@@ -169,7 +168,7 @@ def generate_direct_lake_semantic_model(
169
168
  )
170
169
  def dyn_connect():
171
170
  with connect_semantic_model(
172
- dataset=dataset, readonly=True, workspace=workspace
171
+ dataset=dataset, readonly=True, workspace=workspace_id
173
172
  ) as tom:
174
173
 
175
174
  tom.model
@@ -178,7 +177,7 @@ def generate_direct_lake_semantic_model(
178
177
 
179
178
  expression_name = "DatabaseQuery"
180
179
  with connect_semantic_model(
181
- dataset=dataset, workspace=workspace, readonly=False
180
+ dataset=dataset, workspace=workspace_id, readonly=False
182
181
  ) as tom:
183
182
  if not any(e.Name == expression_name for e in tom.model.Expressions):
184
183
  tom.add_expression(name=expression_name, expression=expr)
@@ -199,21 +198,21 @@ def generate_direct_lake_semantic_model(
199
198
  )
200
199
 
201
200
  if refresh:
202
- refresh_semantic_model(dataset=dataset, workspace=workspace)
201
+ refresh_semantic_model(dataset=dataset, workspace=workspace_id)
203
202
 
204
203
 
205
204
  def get_direct_lake_source(
206
- dataset: str, workspace: Optional[str] = None
205
+ dataset: str | UUID, workspace: Optional[str | UUID] = None
207
206
  ) -> Tuple[str, str, UUID, UUID]:
208
207
  """
209
208
  Obtains the source information for a direct lake semantic model.
210
209
 
211
210
  Parameters
212
211
  ----------
213
- dataset : str
214
- The name of the semantic model.
215
- workspace : str, default=None
216
- The Fabric workspace name.
212
+ dataset : str | uuid.UUID
213
+ The name or ID of the semantic model.
214
+ workspace : str | uuid.UUID, default=None
215
+ The Fabric workspace name or ID.
217
216
  Defaults to None which resolves to the workspace of the attached lakehouse
218
217
  or if no lakehouse attached, resolves to the workspace of the notebook.
219
218
 
@@ -225,8 +224,8 @@ def get_direct_lake_source(
225
224
  If the semantic model is not a Direct Lake semantic model, it will return None, None, None.
226
225
  """
227
226
 
228
- workspace = fabric.resolve_workspace_name(workspace)
229
- dataset_id = resolve_dataset_id(dataset, workspace)
227
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
228
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
230
229
  client = fabric.PowerBIRestClient()
231
230
  request_body = {
232
231
  "artifacts": [
@@ -3,16 +3,18 @@ from sempy_labs._helper_functions import (
3
3
  resolve_lakehouse_name,
4
4
  resolve_lakehouse_id,
5
5
  resolve_warehouse_id,
6
+ resolve_workspace_name_and_id,
6
7
  )
7
8
  from typing import Optional
8
9
  import sempy_labs._icons as icons
9
10
  from sempy.fabric.exceptions import FabricHTTPException
11
+ from uuid import UUID
10
12
 
11
13
 
12
14
  def generate_shared_expression(
13
15
  item_name: Optional[str] = None,
14
16
  item_type: str = "Lakehouse",
15
- workspace: Optional[str] = None,
17
+ workspace: Optional[str | UUID] = None,
16
18
  ) -> str:
17
19
  """
18
20
  Dynamically generates the M expression used by a Direct Lake model for a given lakehouse/warehouse.
@@ -24,8 +26,8 @@ def generate_shared_expression(
24
26
  Defaults to None which resolves to the lakehouse attached to the notebook.
25
27
  item_type : str, default="Lakehouse"
26
28
  The Fabric item name. Valid options: 'Lakehouse', 'Warehouse'.
27
- workspace : str, default=None
28
- The Fabric workspace used by the item.
29
+ workspace : str | uuid.UUID, default=None
30
+ The Fabric workspace name or ID used by the item.
29
31
  Defaults to None which resolves to the workspace of the attached lakehouse
30
32
  or if no lakehouse attached, resolves to the workspace of the notebook.
31
33
 
@@ -35,8 +37,7 @@ def generate_shared_expression(
35
37
  Shows the expression which can be used to connect a Direct Lake semantic model to its SQL Endpoint.
36
38
  """
37
39
 
38
- workspace = fabric.resolve_workspace_name(workspace)
39
- workspace_id = fabric.resolve_workspace_id(workspace)
40
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
40
41
  item_types = ["Lakehouse", "Warehouse"]
41
42
  item_type = item_type.capitalize()
42
43
  if item_type not in item_types:
@@ -46,11 +47,11 @@ def generate_shared_expression(
46
47
 
47
48
  if item_name is None:
48
49
  item_id = fabric.get_lakehouse_id()
49
- item_name = resolve_lakehouse_name(item_id, workspace)
50
+ item_name = resolve_lakehouse_name(item_id, workspace_id)
50
51
  elif item_name is not None and item_type == "Lakehouse":
51
- item_id = resolve_lakehouse_id(lakehouse=item_name, workspace=workspace)
52
+ item_id = resolve_lakehouse_id(lakehouse=item_name, workspace=workspace_id)
52
53
  elif item_type == "Warehouse":
53
- item_id = resolve_warehouse_id(warehouse=item_name, workspace=workspace)
54
+ item_id = resolve_warehouse_id(warehouse=item_name, workspace=workspace_id)
54
55
 
55
56
  client = fabric.FabricRestClient()
56
57
  item_type_rest = f"{item_type.lower()}s"
@@ -72,7 +73,7 @@ def generate_shared_expression(
72
73
 
73
74
  if provStatus == "InProgress":
74
75
  raise ValueError(
75
- f"{icons.red_dot} The SQL Endpoint for the '{item_name}' lakehouse within the '{workspace}' workspace has not yet been provisioned. Please wait until it has been provisioned."
76
+ f"{icons.red_dot} The SQL Endpoint for the '{item_name}' lakehouse within the '{workspace_name}' workspace has not yet been provisioned. Please wait until it has been provisioned."
76
77
  )
77
78
 
78
79
  start_expr = "let\n\tdatabase = "
@@ -3,46 +3,49 @@ from sempy_labs._helper_functions import (
3
3
  resolve_lakehouse_id,
4
4
  resolve_lakehouse_name,
5
5
  get_direct_lake_sql_endpoint,
6
+ resolve_workspace_name_and_id,
7
+ resolve_dataset_name_and_id,
6
8
  )
7
9
  from typing import Optional, Tuple
8
10
  from uuid import UUID
9
11
 
10
12
 
11
13
  def get_direct_lake_lakehouse(
12
- dataset: str,
13
- workspace: Optional[str] = None,
14
+ dataset: str | UUID,
15
+ workspace: Optional[str | UUID] = None,
14
16
  lakehouse: Optional[str] = None,
15
- lakehouse_workspace: Optional[str] = None,
17
+ lakehouse_workspace: Optional[str | UUID] = None,
16
18
  ) -> Tuple[str, UUID]:
17
19
  """
18
20
  Identifies the lakehouse used by a Direct Lake semantic model.
19
21
 
20
22
  Parameters
21
23
  ----------
22
- dataset : str
23
- Name of the semantic model.
24
- workspace : str, default=None
25
- The Fabric workspace name.
24
+ dataset : str | uuid.UUID
25
+ Name or ID of the semantic model.
26
+ workspace : str | uuid.UUID, default=None
27
+ The Fabric workspace name or ID.
26
28
  Defaults to None which resolves to the workspace of the attached lakehouse
27
29
  or if no lakehouse attached, resolves to the workspace of the notebook.
28
30
  lakehouse : str, default=None
29
31
  The Fabric lakehouse used by the Direct Lake semantic model.
30
32
  Defaults to None which resolves to the lakehouse attached to the notebook.
31
- lakehouse_workspace : str, default=None
32
- The Fabric workspace used by the lakehouse.
33
+ lakehouse_workspace : str | uuid.UUID, default=None
34
+ The Fabric workspace name or ID used by the lakehouse.
33
35
  Defaults to None which resolves to the workspace of the attached lakehouse
34
36
  or if no lakehouse attached, resolves to the workspace of the notebook.
35
37
 
36
38
  Returns
37
39
  -------
38
- str, uuid.UUID
40
+ Tuple[str, uuid.UUID]
39
41
  The lakehouse name and lakehouse ID.
40
42
  """
41
43
 
42
- workspace = fabric.resolve_workspace_name(workspace)
44
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
45
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
43
46
 
44
47
  if lakehouse_workspace is None:
45
- lakehouse_workspace = workspace
48
+ lakehouse_workspace = workspace_name
46
49
 
47
50
  if lakehouse is None:
48
51
  lakehouse_id = fabric.get_lakehouse_id()
@@ -56,7 +59,7 @@ def get_direct_lake_lakehouse(
56
59
  # f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode."
57
60
  # )
58
61
 
59
- sqlEndpointId = get_direct_lake_sql_endpoint(dataset, workspace)
62
+ sqlEndpointId = get_direct_lake_sql_endpoint(dataset_id, workspace_id)
60
63
 
61
64
  dfI = fabric.list_items(workspace=lakehouse_workspace, type="SQLEndpoint")
62
65
  dfI_filt = dfI[dfI["Id"] == sqlEndpointId]
@@ -1,8 +1,9 @@
1
1
  from typing import Optional
2
+ from uuid import UUID
2
3
 
3
4
 
4
5
  def get_shared_expression(
5
- lakehouse: Optional[str] = None, workspace: Optional[str] = None
6
+ lakehouse: Optional[str] = None, workspace: Optional[str | UUID] = None
6
7
  ) -> str:
7
8
  """
8
9
  Dynamically generates the M expression used by a Direct Lake model for a given lakehouse.
@@ -12,8 +13,8 @@ def get_shared_expression(
12
13
  lakehouse : str, default=None
13
14
  The Fabric lakehouse used by the Direct Lake semantic model.
14
15
  Defaults to None which resolves to the lakehouse attached to the notebook.
15
- workspace : str, default=None
16
- The Fabric workspace used by the lakehouse.
16
+ workspace : str | uuid.UUID, default=None
17
+ The Fabric workspace name or ID used by the lakehouse.
17
18
  Defaults to None which resolves to the workspace of the attached lakehouse
18
19
  or if no lakehouse attached, resolves to the workspace of the notebook.
19
20
 
@@ -2,6 +2,10 @@ import sempy.fabric as fabric
2
2
  import pandas as pd
3
3
  from typing import Optional
4
4
  import sempy_labs._icons as icons
5
+ from uuid import UUID
6
+ from sempy_labs._helper_functions import (
7
+ resolve_workspace_name_and_id,
8
+ )
5
9
 
6
10
 
7
11
  def get_direct_lake_guardrails() -> pd.DataFrame:
@@ -28,14 +32,14 @@ def get_direct_lake_guardrails() -> pd.DataFrame:
28
32
  return df
29
33
 
30
34
 
31
- def get_sku_size(workspace: Optional[str] = None) -> str:
35
+ def get_sku_size(workspace: Optional[str | UUID] = None) -> str:
32
36
  """
33
37
  Shows the SKU size for a workspace.
34
38
 
35
39
  Parameters
36
40
  ----------
37
- workspace : str, default=None
38
- The Fabric workspace name.
41
+ workspace : str | uuid.UUID, default=None
42
+ The Fabric workspace name or ID.
39
43
  Defaults to None which resolves to the workspace of the attached lakehouse
40
44
  or if no lakehouse attached, resolves to the workspace of the notebook.
41
45
 
@@ -45,12 +49,14 @@ def get_sku_size(workspace: Optional[str] = None) -> str:
45
49
  The SKU size for a workspace.
46
50
  """
47
51
 
48
- workspace = fabric.resolve_workspace_name(workspace)
52
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
49
53
 
50
- dfW = fabric.list_workspaces(filter=f"name eq '{workspace}'")
54
+ dfW = fabric.list_workspaces(filter=f"id eq '{workspace_id}'")
51
55
 
52
56
  if len(dfW) == 0:
53
- raise ValueError(f"{icons.red_dot} The '{workspace}' is not a valid workspace.")
57
+ raise ValueError(
58
+ f"{icons.red_dot} The '{workspace_name}' is not a valid workspace."
59
+ )
54
60
 
55
61
  capacity_id = dfW["Capacity Id"].iloc[0]
56
62
  dfC = fabric.list_capacities()
@@ -5,21 +5,26 @@ from sempy_labs.tom import connect_semantic_model
5
5
  from typing import Optional
6
6
  from sempy._utils._log import log
7
7
  import sempy_labs._icons as icons
8
+ from uuid import UUID
9
+ from sempy_labs._helper_functions import (
10
+ resolve_dataset_name_and_id,
11
+ resolve_workspace_name_and_id,
12
+ )
8
13
 
9
14
 
10
15
  @log
11
16
  def list_direct_lake_model_calc_tables(
12
- dataset: str, workspace: Optional[str] = None
17
+ dataset: str | UUID, workspace: Optional[str | UUID] = None
13
18
  ) -> pd.DataFrame:
14
19
  """
15
20
  Shows the calculated tables and their respective DAX expression for a Direct Lake model (which has been migrated from import/DirectQuery).
16
21
 
17
22
  Parameters
18
23
  ----------
19
- dataset : str
20
- Name of the semantic model.
21
- workspace : str, default=None
22
- The Fabric workspace name.
24
+ dataset : str | uuid.UUID
25
+ Name or ID of the semantic model.
26
+ workspace : str | uuid.UUID, default=None
27
+ The Fabric workspace name or ID.
23
28
  Defaults to None which resolves to the workspace of the attached lakehouse
24
29
  or if no lakehouse attached, resolves to the workspace of the notebook.
25
30
 
@@ -29,12 +34,13 @@ def list_direct_lake_model_calc_tables(
29
34
  A pandas dataframe showing the calculated tables which were migrated to Direct Lake and whose DAX expressions are stored as model annotations.
30
35
  """
31
36
 
32
- workspace = fabric.resolve_workspace_name(workspace)
37
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
38
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
33
39
 
34
40
  df = pd.DataFrame(columns=["Table Name", "Source Expression"])
35
41
 
36
42
  with connect_semantic_model(
37
- dataset=dataset, readonly=True, workspace=workspace
43
+ dataset=dataset_id, readonly=True, workspace=workspace_id
38
44
  ) as tom:
39
45
 
40
46
  is_direct_lake = tom.is_direct_lake()
@@ -44,8 +50,8 @@ def list_direct_lake_model_calc_tables(
44
50
  f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake mode."
45
51
  )
46
52
  else:
47
- dfA = fabric.list_annotations(dataset=dataset, workspace=workspace)
48
- dfT = list_tables(dataset, workspace)
53
+ dfA = fabric.list_annotations(dataset=dataset_id, workspace=workspace_id)
54
+ dfT = list_tables(dataset_id, workspace_id)
49
55
  dfA_filt = dfA[
50
56
  (dfA["Object Type"] == "Model")
51
57
  & (dfA["Annotation Name"].isin(dfT["Name"]))
@@ -1,13 +1,18 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
- from sempy_labs._helper_functions import format_dax_object_name
3
+ from sempy_labs._helper_functions import (
4
+ format_dax_object_name,
5
+ resolve_dataset_name_and_id,
6
+ resolve_workspace_name_and_id,
7
+ )
4
8
  from typing import Optional, Tuple
5
9
  from sempy._utils._log import log
10
+ from uuid import UUID
6
11
 
7
12
 
8
13
  @log
9
14
  def show_unsupported_direct_lake_objects(
10
- dataset: str, workspace: Optional[str] = None
15
+ dataset: str | UUID, workspace: Optional[str | UUID] = None
11
16
  ) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
12
17
  """
13
18
  Returns a list of a semantic model's objects which are not supported by Direct Lake based on
@@ -15,10 +20,10 @@ def show_unsupported_direct_lake_objects(
15
20
 
16
21
  Parameters
17
22
  ----------
18
- dataset : str
19
- Name of the semantic model.
20
- workspace : str, default=None
21
- The Fabric workspace name.
23
+ dataset : str | uuid.UUID
24
+ Name or ID of the semantic model.
25
+ workspace : str | uuid.UUID, default=None
26
+ The Fabric workspace name or ID.
22
27
  Defaults to None which resolves to the workspace of the attached lakehouse
23
28
  or if no lakehouse attached, resolves to the workspace of the notebook.
24
29
 
@@ -30,11 +35,12 @@ def show_unsupported_direct_lake_objects(
30
35
 
31
36
  pd.options.mode.chained_assignment = None
32
37
 
33
- workspace = fabric.resolve_workspace_name(workspace)
38
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
39
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
34
40
 
35
- dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
36
- dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
37
- dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
41
+ dfT = fabric.list_tables(dataset=dataset_id, workspace=workspace_id)
42
+ dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
43
+ dfR = fabric.list_relationships(dataset=dataset_id, workspace=workspace_id)
38
44
 
39
45
  # Calc tables
40
46
  dfT_filt = dfT[dfT["Type"] == "Calculated Table"]
@@ -2,42 +2,46 @@ import sempy.fabric as fabric
2
2
  from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
3
3
  from sempy_labs._helper_functions import (
4
4
  resolve_lakehouse_name,
5
+ resolve_dataset_name_and_id,
6
+ resolve_workspace_name_and_id,
5
7
  )
6
8
  from sempy_labs.tom import connect_semantic_model
7
9
  from typing import Optional
8
10
  import sempy_labs._icons as icons
11
+ from uuid import UUID
9
12
 
10
13
 
11
14
  def update_direct_lake_model_lakehouse_connection(
12
- dataset: str,
13
- workspace: Optional[str] = None,
15
+ dataset: str | UUID,
16
+ workspace: Optional[str | UUID] = None,
14
17
  lakehouse: Optional[str] = None,
15
- lakehouse_workspace: Optional[str] = None,
18
+ lakehouse_workspace: Optional[str | UUID] = None,
16
19
  ):
17
20
  """
18
21
  Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse.
19
22
 
20
23
  Parameters
21
24
  ----------
22
- dataset : str
23
- Name of the semantic model.
24
- workspace : str, default=None
25
- The Fabric workspace name in which the semantic model exists.
25
+ dataset : str | UUID
26
+ Name or ID of the semantic model.
27
+ workspace : str | UUID, default=None
28
+ The Fabric workspace name or ID in which the semantic model exists.
26
29
  Defaults to None which resolves to the workspace of the attached lakehouse
27
30
  or if no lakehouse attached, resolves to the workspace of the notebook.
28
31
  lakehouse : str, default=None
29
32
  The Fabric lakehouse used by the Direct Lake semantic model.
30
33
  Defaults to None which resolves to the lakehouse attached to the notebook.
31
- lakehouse_workspace : str, default=None
32
- The Fabric workspace used by the lakehouse.
34
+ lakehouse_workspace : str | UUID, default=None
35
+ The Fabric workspace name or ID used by the lakehouse.
33
36
  Defaults to None which resolves to the workspace of the attached lakehouse
34
37
  or if no lakehouse attached, resolves to the workspace of the notebook.
35
38
  """
36
39
 
37
- workspace = fabric.resolve_workspace_name(workspace)
40
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
41
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
38
42
 
39
43
  if lakehouse_workspace is None:
40
- lakehouse_workspace = workspace
44
+ lakehouse_workspace = workspace_name
41
45
 
42
46
  if lakehouse is None:
43
47
  lakehouse_id = fabric.get_lakehouse_id()
@@ -50,7 +54,7 @@ def update_direct_lake_model_lakehouse_connection(
50
54
  if len(dfI_filt) == 0:
51
55
  raise ValueError(
52
56
  f"{icons.red_dot} The '{lakehouse}' lakehouse does not exist within the '{lakehouse_workspace}' workspace. "
53
- f"Therefore it cannot be used to support the '{dataset}' semantic model within the '{workspace}' workspace."
57
+ f"Therefore it cannot be used to support the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
54
58
  )
55
59
 
56
60
  icons.sll_tags.append("UpdateDLConnection")
@@ -60,37 +64,37 @@ def update_direct_lake_model_lakehouse_connection(
60
64
  )
61
65
 
62
66
  with connect_semantic_model(
63
- dataset=dataset, readonly=False, workspace=workspace
67
+ dataset=dataset_id, readonly=False, workspace=workspace_id
64
68
  ) as tom:
65
69
 
66
70
  if not tom.is_direct_lake():
67
71
  raise ValueError(
68
- f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
72
+ f"{icons.red_dot} The '{dataset_name}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
69
73
  )
70
74
 
71
75
  tom.model.Expressions["DatabaseQuery"].Expression = shEx
72
76
 
73
77
  print(
74
- f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
78
+ f"{icons.green_dot} The expression in the '{dataset_name}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
75
79
  )
76
80
 
77
81
 
78
82
  def update_direct_lake_model_connection(
79
- dataset: str,
80
- workspace: Optional[str] = None,
83
+ dataset: str | UUID,
84
+ workspace: Optional[str | UUID] = None,
81
85
  source: Optional[str] = None,
82
- source_type: Optional[str] = "Lakehouse",
83
- source_workspace: Optional[str] = None,
86
+ source_type: str = "Lakehouse",
87
+ source_workspace: Optional[str | UUID] = None,
84
88
  ):
85
89
  """
86
90
  Remaps a Direct Lake semantic model's SQL Endpoint connection to a new lakehouse/warehouse.
87
91
 
88
92
  Parameters
89
93
  ----------
90
- dataset : str
91
- Name of the semantic model.
92
- workspace : str, default=None
93
- The Fabric workspace name in which the semantic model exists.
94
+ dataset : str | UUID
95
+ Name or ID of the semantic model.
96
+ workspace : str | UUID, default=None
97
+ The Fabric workspace name or ID in which the semantic model exists.
94
98
  Defaults to None which resolves to the workspace of the attached lakehouse
95
99
  or if no lakehouse attached, resolves to the workspace of the notebook.
96
100
  source : str, default=None
@@ -98,14 +102,14 @@ def update_direct_lake_model_connection(
98
102
  Defaults to None which resolves to the lakehouse attached to the notebook.
99
103
  source_type : str, default="Lakehouse"
100
104
  The type of source for the Direct Lake semantic model. Valid options: "Lakehouse", "Warehouse".
101
- source_workspace : str, default=None
102
- The Fabric workspace used by the lakehouse/warehouse.
105
+ source_workspace : str | UUID, default=None
106
+ The Fabric workspace name or ID used by the lakehouse/warehouse.
103
107
  Defaults to None which resolves to the workspace of the attached lakehouse
104
108
  or if no lakehouse attached, resolves to the workspace of the notebook.
105
109
  """
106
110
 
107
- if workspace is None:
108
- workspace = fabric.resolve_workspace_name(workspace)
111
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
112
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
109
113
 
110
114
  source_type = source_type.capitalize()
111
115
 
@@ -115,7 +119,7 @@ def update_direct_lake_model_connection(
115
119
  )
116
120
 
117
121
  if source_workspace is None:
118
- source_workspace = workspace
122
+ source_workspace = workspace_name
119
123
 
120
124
  if source is None:
121
125
  source_id = fabric.get_lakehouse_id()
@@ -135,16 +139,16 @@ def update_direct_lake_model_connection(
135
139
  )
136
140
 
137
141
  with connect_semantic_model(
138
- dataset=dataset, readonly=False, workspace=workspace
142
+ dataset=dataset_id, readonly=False, workspace=workspace_id
139
143
  ) as tom:
140
144
 
141
145
  if not tom.is_direct_lake():
142
146
  raise ValueError(
143
- f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
147
+ f"{icons.red_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
144
148
  )
145
149
 
146
150
  tom.model.Expressions["DatabaseQuery"].Expression = shEx
147
151
 
148
152
  print(
149
- f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{source}' {source_type.lower()} in the '{source_workspace}' workspace."
153
+ f"{icons.green_dot} The expression in the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been updated to point to the '{source}' {source_type.lower()} in the '{source_workspace}' workspace."
150
154
  )