semantic-link-labs 0.7.4__py3-none-any.whl → 0.8.1__py3-none-any.whl

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Files changed (59)
  1. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/METADATA +43 -7
  2. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/RECORD +59 -40
  3. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +116 -58
  5. sempy_labs/_ai.py +0 -2
  6. sempy_labs/_capacities.py +39 -3
  7. sempy_labs/_capacity_migration.py +623 -0
  8. sempy_labs/_clear_cache.py +8 -8
  9. sempy_labs/_connections.py +15 -13
  10. sempy_labs/_data_pipelines.py +118 -0
  11. sempy_labs/_documentation.py +144 -0
  12. sempy_labs/_eventhouses.py +118 -0
  13. sempy_labs/_eventstreams.py +118 -0
  14. sempy_labs/_generate_semantic_model.py +3 -3
  15. sempy_labs/_git.py +23 -24
  16. sempy_labs/_helper_functions.py +140 -47
  17. sempy_labs/_icons.py +40 -0
  18. sempy_labs/_kql_databases.py +134 -0
  19. sempy_labs/_kql_querysets.py +124 -0
  20. sempy_labs/_list_functions.py +218 -421
  21. sempy_labs/_mirrored_warehouses.py +50 -0
  22. sempy_labs/_ml_experiments.py +122 -0
  23. sempy_labs/_ml_models.py +120 -0
  24. sempy_labs/_model_auto_build.py +0 -4
  25. sempy_labs/_model_bpa.py +10 -12
  26. sempy_labs/_model_bpa_bulk.py +8 -7
  27. sempy_labs/_model_dependencies.py +26 -18
  28. sempy_labs/_notebooks.py +5 -16
  29. sempy_labs/_query_scale_out.py +6 -5
  30. sempy_labs/_refresh_semantic_model.py +7 -19
  31. sempy_labs/_spark.py +40 -45
  32. sempy_labs/_sql.py +60 -15
  33. sempy_labs/_vertipaq.py +25 -25
  34. sempy_labs/_warehouses.py +132 -0
  35. sempy_labs/_workspaces.py +0 -3
  36. sempy_labs/admin/__init__.py +53 -0
  37. sempy_labs/admin/_basic_functions.py +888 -0
  38. sempy_labs/admin/_domains.py +411 -0
  39. sempy_labs/directlake/_directlake_schema_sync.py +1 -1
  40. sempy_labs/directlake/_dl_helper.py +32 -16
  41. sempy_labs/directlake/_generate_shared_expression.py +11 -14
  42. sempy_labs/directlake/_guardrails.py +7 -7
  43. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +14 -24
  44. sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
  45. sempy_labs/directlake/_warm_cache.py +1 -1
  46. sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -3
  47. sempy_labs/lakehouse/_lakehouse.py +3 -2
  48. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
  49. sempy_labs/report/__init__.py +9 -6
  50. sempy_labs/report/_generate_report.py +1 -1
  51. sempy_labs/report/_report_bpa.py +369 -0
  52. sempy_labs/report/_report_bpa_rules.py +113 -0
  53. sempy_labs/report/_report_helper.py +254 -0
  54. sempy_labs/report/_report_list_functions.py +95 -0
  55. sempy_labs/report/_report_rebind.py +0 -4
  56. sempy_labs/report/_reportwrapper.py +2037 -0
  57. sempy_labs/tom/_model.py +333 -22
  58. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/LICENSE +0 -0
  59. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/top_level.txt +0 -0
sempy_labs/admin/_domains.py
@@ -0,0 +1,411 @@
+import sempy.fabric as fabric
+from typing import Optional, List
+import sempy_labs._icons as icons
+from sempy_labs._helper_functions import lro
+from sempy.fabric.exceptions import FabricHTTPException
+import pandas as pd
+from uuid import UUID
+
+
+def resolve_domain_id(domain_name: str) -> UUID:
+    """
+    Obtains the domain Id for a given domain name.
+
+    Parameters
+    ----------
+    domain_name : str
+        The domain name.
+
+    Returns
+    -------
+    UUID
+        The domain Id.
+    """
+
+    dfL = list_domains()
+    dfL_filt = dfL[dfL["Domain Name"] == domain_name]
+    if len(dfL_filt) == 0:
+        raise ValueError(f"{icons.red_dot} '{domain_name}' is not a valid domain name.")
+
+    return dfL_filt["Domain ID"].iloc[0]
+
+
+def list_domains(non_empty_only: bool = False) -> pd.DataFrame:
+    """
+    Shows a list of domains.
+
+    Parameters
+    ----------
+    non_empty_only : bool, default=False
+        When True, only return domains that have at least one workspace containing an item.
+        Defaults to False.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of the domains.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/list-domains?tabs=HTTP
+
+    df = pd.DataFrame(
+        columns=[
+            "Domain ID",
+            "Domain Name",
+            "Description",
+            "Parent Domain ID",
+            "Contributors Scope",
+        ]
+    )
+
+    client = fabric.FabricRestClient()
+    url = "/v1/admin/domains"
+    if non_empty_only:
+        url = f"{url}?nonEmptyOnly=True"
+    response = client.get(url)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    for v in response.json().get("domains", []):
+        new_data = {
+            "Domain ID": v.get("id"),
+            "Domain Name": v.get("displayName"),
+            "Description": v.get("description"),
+            "Parent Domain ID": v.get("parentDomainId"),
+            "Contributors Scope": v.get("contributorsScope"),
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def list_domain_workspaces(domain_name: str) -> pd.DataFrame:
+    """
+    Shows a list of workspaces within the domain.
+
+    Parameters
+    ----------
+    domain_name : str
+        The domain name.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of workspaces within the domain.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/list-domain-workspaces?tabs=HTTP
+
+    domain_id = resolve_domain_id(domain_name)
+
+    df = pd.DataFrame(columns=["Workspace ID", "Workspace Name"])
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1/admin/domains/{domain_id}/workspaces")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    for v in response.json().get("value", []):
+        new_data = {
+            "Workspace ID": v.get("id"),
+            "Workspace Name": v.get("displayName"),
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def create_domain(
+    domain_name: str,
+    description: Optional[str] = None,
+    parent_domain_name: Optional[str] = None,
+):
+    """
+    Creates a new domain.
+
+    Parameters
+    ----------
+    domain_name : str
+        The domain name.
+    description : str, default=None
+        The domain description.
+    parent_domain_name : str, default=None
+        The parent domain name.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/create-domain?tabs=HTTP
+
+    if parent_domain_name is not None:
+        parent_domain_id = resolve_domain_id(parent_domain_name)
+
+    payload = {}
+    payload["displayName"] = domain_name
+    if description is not None:
+        payload["description"] = description
+    if parent_domain_name is not None:
+        payload["parentDomainId"] = parent_domain_id
+
+    client = fabric.FabricRestClient()
+    response = client.post("/v1/admin/domains", json=payload)
+
+    if response.status_code != 201:
+        raise FabricHTTPException(response)
+
+    print(f"{icons.green_dot} The '{domain_name}' domain has been created.")
+
+
+def delete_domain(domain_name: str):
+    """
+    Deletes a domain.
+
+    Parameters
+    ----------
+    domain_name : str
+        The domain name.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/delete-domain?tabs=HTTP
+
+    domain_id = resolve_domain_id(domain_name)
+
+    client = fabric.FabricRestClient()
+    response = client.delete(f"/v1/admin/domains/{domain_id}")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(f"{icons.green_dot} The '{domain_name}' domain has been deleted.")
+
+
+def update_domain(
+    domain_name: str,
+    description: Optional[str] = None,
+    contributors_scope: Optional[str] = None,
+):
+    """
+    Updates a domain's properties.
+
+    Parameters
+    ----------
+    domain_name : str
+        The domain name.
+    description : str, default=None
+        The domain description.
+    contributors_scope : str, default=None
+        The domain `contributor scope <https://learn.microsoft.com/rest/api/fabric/admin/domains/update-domain?tabs=HTTP#contributorsscopetype>`_.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/update-domain?tabs=HTTP
+
+    contributors_scopes = ["AdminsOnly", "AllTenant", "SpecificUsersAndGroups"]
+
+    if contributors_scope is not None and contributors_scope not in contributors_scopes:
+        raise ValueError(
+            f"{icons.red_dot} Invalid contributors scope. Valid options: {contributors_scopes}."
+        )
+
+    domain_id = resolve_domain_id(domain_name)
+
+    payload = {}
+    payload["displayName"] = domain_name
+
+    if description is not None:
+        payload["description"] = description
+    if contributors_scope is not None:
+        payload["contributorsScope"] = contributors_scope
+
+    client = fabric.FabricRestClient()
+    response = client.patch(f"/v1/admin/domains/{domain_id}", json=payload)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(f"{icons.green_dot} The '{domain_name}' domain has been updated.")
+
+
+def assign_domain_workspaces_by_capacities(
+    domain_name: str, capacity_names: str | List[str]
+):
+    """
+    Assigns all workspaces that reside on the specified capacities to the specified domain.
+
+    Parameters
+    ----------
+    domain_name : str
+        The domain name.
+    capacity_names : str | List[str]
+        The capacity name(s).
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/assign-domain-workspaces-by-capacities?tabs=HTTP
+
+    from sempy_labs.admin import list_capacities
+
+    domain_id = resolve_domain_id(domain_name)
+
+    if isinstance(capacity_names, str):
+        capacity_names = [capacity_names]
+
+    dfC = list_capacities()
+
+    # Check for invalid capacities
+    invalid_capacities = [
+        name for name in capacity_names if name not in dfC["Display Name"].values
+    ]
+
+    if len(invalid_capacities) == 1:
+        raise ValueError(
+            f"{icons.red_dot} The {invalid_capacities} capacity is not valid."
+        )
+    elif len(invalid_capacities) > 1:
+        raise ValueError(
+            f"{icons.red_dot} The {invalid_capacities} capacities are not valid."
+        )
+
+    # Get list of capacity Ids for the payload
+    dfC_filt = dfC[dfC["Display Name"].isin(capacity_names)]
+    capacity_list = list(dfC_filt["Id"].str.upper())
+
+    payload = {"capacitiesIds": capacity_list}
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/admin/domains/{domain_id}/assignWorkspacesByCapacities",
+        json=payload,
+    )
+
+    lro(client, response)
+
+    print(
+        f"{icons.green_dot} The workspaces in the {capacity_names} capacities have been assigned to the '{domain_name}' domain."
+    )
+
+
+def assign_domain_workspaces(domain_name: str, workspace_names: str | List[str]):
+    """
+    Assigns workspaces to the specified domain by workspace name.
+
+    Parameters
+    ----------
+    domain_name : str
+        The domain name.
+    workspace_names : str | List[str]
+        The Fabric workspace name(s).
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/assign-domain-workspaces-by-ids?tabs=HTTP
+
+    domain_id = resolve_domain_id(domain_name=domain_name)
+
+    if isinstance(workspace_names, str):
+        workspace_names = [workspace_names]
+
+    dfW = fabric.list_workspaces()
+
+    # Check for invalid workspaces
+    invalid_workspaces = [
+        name for name in workspace_names if name not in dfW["Name"].values
+    ]
+
+    if len(invalid_workspaces) == 1:
+        raise ValueError(
+            f"{icons.red_dot} The {invalid_workspaces} workspace is not valid."
+        )
+    elif len(invalid_workspaces) > 1:
+        raise ValueError(
+            f"{icons.red_dot} The {invalid_workspaces} workspaces are not valid."
+        )
+
+    dfW_filt = dfW[dfW["Name"].isin(workspace_names)]
+    workspace_list = list(dfW_filt["Id"])
+
+    payload = {"workspacesIds": workspace_list}
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/admin/domains/{domain_id}/assignWorkspaces",
+        json=payload,
+    )
+
+    lro(client, response)
+
+    print(
+        f"{icons.green_dot} The {workspace_names} workspaces have been assigned to the '{domain_name}' domain."
+    )
+
+
+def unassign_all_domain_workspaces(domain_name: str):
+    """
+    Unassigns all workspaces from the specified domain.
+
+    Parameters
+    ----------
+    domain_name : str
+        The domain name.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/unassign-all-domain-workspaces?tabs=HTTP
+
+    domain_id = resolve_domain_id(domain_name=domain_name)
+
+    client = fabric.FabricRestClient()
+    response = client.post(f"/v1/admin/domains/{domain_id}/unassignAllWorkspaces")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} All workspaces assigned to the '{domain_name}' domain have been unassigned."
+    )
+
+
+def unassign_domain_workspaces(domain_name: str, workspace_names: str | List[str]):
+    """
+    Unassigns workspaces from the specified domain by workspace name.
+
+    Parameters
+    ----------
+    domain_name : str
+        The domain name.
+    workspace_names : str | List[str]
+        The Fabric workspace name(s).
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/unassign-domain-workspaces-by-ids?tabs=HTTP
+
+    domain_id = resolve_domain_id(domain_name=domain_name)
+
+    if isinstance(workspace_names, str):
+        workspace_names = [workspace_names]
+
+    dfW = fabric.list_workspaces()
+
+    # Check for invalid workspaces
+    invalid_workspaces = [
+        name for name in workspace_names if name not in dfW["Name"].values
+    ]

+    if len(invalid_workspaces) == 1:
+        raise ValueError(
+            f"{icons.red_dot} The {invalid_workspaces} workspace is not valid."
+        )
+    elif len(invalid_workspaces) > 1:
+        raise ValueError(
+            f"{icons.red_dot} The {invalid_workspaces} workspaces are not valid."
+        )
+
+    dfW_filt = dfW[dfW["Name"].isin(workspace_names)]
+    workspace_list = list(dfW_filt["Id"])
+
+    payload = {"workspacesIds": workspace_list}
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/admin/domains/{domain_id}/unassignWorkspaces", json=payload
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The {workspace_names} workspaces assigned to the '{domain_name}' domain have been unassigned."
+    )
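
Taken together, these functions wrap the Fabric Admin domains REST API end to end: resolve a domain by name, enumerate domains and their workspaces, create/update/delete domains, and move workspaces in and out of them. A minimal usage sketch, assuming the functions are re-exported from sempy_labs.admin (as the new admin/__init__.py suggests), a tenant-admin identity, and placeholder domain/capacity names:

    from sempy_labs import admin

    # "Finance" and "FinanceCapacity" are hypothetical names.
    admin.create_domain(domain_name="Finance", description="Finance data products")
    admin.assign_domain_workspaces_by_capacities("Finance", capacity_names="FinanceCapacity")
    print(admin.list_domain_workspaces("Finance"))

    # Restrict who may assign content to the domain, then clean up.
    admin.update_domain(domain_name="Finance", contributors_scope="AdminsOnly")
    admin.unassign_all_domain_workspaces("Finance")
    admin.delete_domain("Finance")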
sempy_labs/directlake/_directlake_schema_sync.py
@@ -12,7 +12,7 @@ import sempy_labs._icons as icons
 def direct_lake_schema_sync(
     dataset: str,
     workspace: Optional[str] = None,
-    add_to_model: Optional[bool] = False,
+    add_to_model: bool = False,
     **kwargs,
 ):
     """
sempy_labs/directlake/_dl_helper.py
@@ -5,7 +5,11 @@ from typing import Optional, List, Union, Tuple
 from uuid import UUID
 import sempy_labs._icons as icons
 from sempy._utils._log import log
-from sempy_labs._helper_functions import retry, resolve_dataset_id
+from sempy_labs._helper_functions import (
+    retry,
+    resolve_dataset_id,
+    resolve_lakehouse_name,
+)
 
 
 def check_fallback_reason(
@@ -28,16 +32,17 @@ def check_fallback_reason(
     pandas.DataFrame
         The tables in the semantic model and their fallback reason.
     """
+    from sempy_labs.tom import connect_semantic_model
 
     workspace = fabric.resolve_workspace_name(workspace)
 
-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
-    dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
-
-    if len(dfP_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
-        )
+    with connect_semantic_model(
+        dataset=dataset, workspace=workspace, readonly=True
+    ) as tom:
+        if not tom.is_direct_lake():
+            raise ValueError(
+                f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+            )
 
     df = fabric.evaluate_dax(
         dataset=dataset,
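
The Direct Lake check now goes through the TOM wrapper rather than scanning partition modes: connect_semantic_model(..., readonly=True) opens the model without taking a write lock, and tom.is_direct_lake() answers what the old fabric.list_partitions filter did. The same pattern works in user code, e.g. with a placeholder dataset name:

    from sempy_labs.tom import connect_semantic_model

    with connect_semantic_model(dataset="Sales Model", readonly=True) as tom:
        print("Direct Lake" if tom.is_direct_lake() else "Import/DirectQuery")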
@@ -73,8 +78,9 @@ def generate_direct_lake_semantic_model(
     workspace: Optional[str] = None,
     lakehouse: Optional[str] = None,
     lakehouse_workspace: Optional[str] = None,
-    overwrite: Optional[bool] = False,
-    refresh: Optional[bool] = True,
+    schema: str = "dbo",
+    overwrite: bool = False,
+    refresh: bool = True,
 ):
     """
     Dynamically generates a Direct Lake semantic model based on tables in a Fabric lakehouse.
@@ -96,23 +102,32 @@ def generate_direct_lake_semantic_model(
         The Fabric workspace in which the lakehouse resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    schema : str, default="dbo"
+        The schema used for the lakehouse.
     overwrite : bool, default=False
         If set to True, overwrites the existing semantic model if it already exists.
     refresh: bool, default=True
         If True, refreshes the newly created semantic model after it is created.
-
-    Returns
-    -------
     """
 
     from sempy_labs.lakehouse import get_lakehouse_tables, get_lakehouse_columns
-    from sempy_labs import create_blank_semantic_model, refresh_semantic_model
-    from sempy_labs.tom import connect_semantic_model
     from sempy_labs.directlake import get_shared_expression
+    from sempy_labs.tom import connect_semantic_model
+    from sempy_labs._generate_semantic_model import create_blank_semantic_model
+    from sempy_labs._refresh_semantic_model import refresh_semantic_model
 
     if isinstance(lakehouse_tables, str):
         lakehouse_tables = [lakehouse_tables]
 
+    workspace = fabric.resolve_workspace_name(workspace)
+    if lakehouse_workspace is None:
+        lakehouse_workspace = workspace
+    if lakehouse is None:
+        lakehouse_id = fabric.get_lakehouse_id()
+        lakehouse_workspace_id = fabric.get_workspace_id()
+        lakehouse_workspace = fabric.resolve_workspace_name(lakehouse_workspace_id)
+        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
+
     dfLT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lakehouse_workspace)
 
     # Validate lakehouse tables
@@ -158,7 +173,7 @@ def generate_direct_lake_semantic_model(
 
         for t in lakehouse_tables:
             tom.add_table(name=t)
-            tom.add_entity_partition(table_name=t, entity_name=t)
+            tom.add_entity_partition(table_name=t, entity_name=t, schema_name=schema)
             dfLC_filt = dfLC[dfLC["Table Name"] == t]
             for i, r in dfLC_filt.iterrows():
                 lakeCName = r["Column Name"]
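
The new schema parameter is threaded through to add_entity_partition, so generated models can point at lakehouse tables outside dbo. A sketch with placeholder names (parameter names taken from the hunks above; others assumed):

    from sempy_labs.directlake import generate_direct_lake_semantic_model

    generate_direct_lake_semantic_model(
        dataset="Sales Model",
        lakehouse_tables=["FactSales", "DimDate"],
        lakehouse="SalesLakehouse",
        schema="sales",  # previously hard-wired to "dbo"
        overwrite=True,
    )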
@@ -212,6 +227,7 @@ def get_direct_lake_source(
     response = client.post(
         "metadata/relations/upstream?apiVersion=3", json=request_body
     )
+
     artifacts = response.json().get("artifacts", [])
     sql_id, sql_object_name, sql_workspace_id, artifact_type = None, None, None, None
 
sempy_labs/directlake/_generate_shared_expression.py
@@ -58,14 +58,15 @@ def generate_shared_expression(
     if response.status_code != 200:
         raise FabricHTTPException(response)
 
+    prop = response.json().get("properties")
+
     if item_type == "Lakehouse":
-        prop = response.json()["properties"]["sqlEndpointProperties"]
-        sqlEPCS = prop["connectionString"]
-        sqlepid = prop["id"]
-        provStatus = prop["provisioningStatus"]
+        sqlprop = prop.get("sqlEndpointProperties")
+        sqlEPCS = sqlprop.get("connectionString")
+        sqlepid = sqlprop.get("id")
+        provStatus = sqlprop.get("provisioningStatus")
     elif item_type == "Warehouse":
-        prop = response.json()["properties"]
-        sqlEPCS = prop["connectionString"]
+        sqlEPCS = prop.get("connectionString")
         sqlepid = item_id
         provStatus = None
 
@@ -74,12 +75,8 @@ def generate_shared_expression(
             f"{icons.red_dot} The SQL Endpoint for the '{item_name}' lakehouse within the '{workspace}' workspace has not yet been provisioned. Please wait until it has been provisioned."
         )
 
-    sh = (
-        'let\n\tdatabase = Sql.Database("'
-        + sqlEPCS
-        + '", "'
-        + sqlepid
-        + '")\nin\n\tdatabase'
-    )
+    start_expr = "let\n\tdatabase = "
+    end_expr = "\nin\n\tdatabase"
+    mid_expr = f'Sql.Database("{sqlEPCS}", "{sqlepid}")'
 
-    return sh
+    return f"{start_expr}{mid_expr}{end_expr}"
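
The refactor replaces string concatenation with an f-string and defensive .get() lookups; the returned M expression is unchanged. For a placeholder connection string and endpoint id, the function returns the string:

    let
    	database = Sql.Database("<sql-endpoint-connection-string>", "<sql-endpoint-id>")
    in
    	database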
sempy_labs/directlake/_guardrails.py
@@ -7,10 +7,7 @@ import sempy_labs._icons as icons
 def get_direct_lake_guardrails() -> pd.DataFrame:
     """
     Shows the guardrails for when Direct Lake semantic models will fallback to Direct Query
-    based on Microsoft's `online documentation <https://learn.microsoft.com/power-bi/enterprise/directlake-overview>`_.
-
-    Parameters
-    ----------
+    based on Microsoft's `online documentation <https://learn.microsoft.com/power-bi/enterprise/directlake-overview>`_.
 
     Returns
     -------
@@ -21,9 +18,12 @@ def get_direct_lake_guardrails() -> pd.DataFrame:
     url = "https://learn.microsoft.com/power-bi/enterprise/directlake-overview"
 
     tables = pd.read_html(url)
-    df = tables[0]
-    df["Fabric SKUs"] = df["Fabric SKUs"].str.split("/")
-    df = df.explode("Fabric SKUs", ignore_index=True)
+    for df in tables:
+        first_column_name = df.columns[0]
+        if first_column_name.startswith("Fabric"):
+            df[first_column_name] = df[first_column_name].str.split("/")
+            df = df.explode(first_column_name, ignore_index=True)
+            break
 
     return df
 
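Rather than assuming the guardrail table is the first table on the documentation page, the loop now locates the first table whose leading column starts with "Fabric" and explodes its slash-separated SKU list into one row per SKU, so the scrape survives Microsoft reordering tables in the article. Usage is unchanged; a sketch filtering to one placeholder SKU (assuming the function is exported from sempy_labs.directlake):

    from sempy_labs.directlake import get_direct_lake_guardrails

    guardrails = get_direct_lake_guardrails()
    print(guardrails[guardrails[guardrails.columns[0]] == "F64"])
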
sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py
@@ -32,10 +32,6 @@ def update_direct_lake_model_lakehouse_connection(
         The Fabric workspace used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-
     """
 
     workspace = fabric.resolve_workspace_name(workspace)
@@ -57,25 +53,19 @@ def update_direct_lake_model_lakehouse_connection(
         f"Therefore it cannot be used to support the '{dataset}' semantic model within the '{workspace}' workspace."
     )
 
-    dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
-    dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
+    shEx = get_shared_expression(lakehouse, lakehouse_workspace)
 
-    if len(dfP_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
-        )
-    else:
-        with connect_semantic_model(
-            dataset=dataset, readonly=False, workspace=workspace
-        ) as tom:
+    with connect_semantic_model(
+        dataset=dataset, readonly=False, workspace=workspace
+    ) as tom:
+
+        if not tom.is_direct_lake():
+            raise ValueError(
+                f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
+            )
+
+        tom.model.Expressions["DatabaseQuery"].Expression = shEx
 
-            shEx = get_shared_expression(lakehouse, lakehouse_workspace)
-            try:
-                tom.model.Expressions["DatabaseQuery"].Expression = shEx
-                print(
-                    f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
-                )
-            except Exception as e:
-                raise ValueError(
-                    f"{icons.red_dot} The expression in the '{dataset}' semantic model was not updated."
-                ) from e
+    print(
+        f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
    )
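
Rebinding now happens in a single writeable TOM session: the Direct Lake check and the "DatabaseQuery" expression update share one connection, and the try/except that converted every failure into a generic message is gone, so the underlying error surfaces directly. A sketch with placeholder names:

    from sempy_labs.directlake import update_direct_lake_model_lakehouse_connection

    # Repoint the model's shared expression at a different lakehouse.
    update_direct_lake_model_lakehouse_connection(
        dataset="Sales Model",
        lakehouse="SalesLakehouse",
        lakehouse_workspace="Sales",
    )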
sempy_labs/directlake/_update_directlake_partition_entity.py
@@ -100,7 +100,7 @@ def add_table_to_direct_lake_semantic_model(
     dataset: str,
     table_name: str,
     lakehouse_table_name: str,
-    refresh: Optional[bool] = True,
+    refresh: bool = True,
     workspace: Optional[str] = None,
 ):
     """
sempy_labs/directlake/_warm_cache.py
@@ -15,7 +15,7 @@ import sempy_labs._icons as icons
 def warm_direct_lake_cache_perspective(
     dataset: str,
     perspective: str,
-    add_dependencies: Optional[bool] = False,
+    add_dependencies: bool = False,
     workspace: Optional[str] = None,
 ) -> pd.DataFrame:
     """
sempy_labs/lakehouse/_get_lakehouse_tables.py
@@ -24,9 +24,9 @@ from sempy.fabric.exceptions import FabricHTTPException
 def get_lakehouse_tables(
     lakehouse: Optional[str] = None,
     workspace: Optional[str] = None,
-    extended: Optional[bool] = False,
-    count_rows: Optional[bool] = False,
-    export: Optional[bool] = False,
+    extended: bool = False,
+    count_rows: bool = False,
+    export: bool = False,
 ) -> pd.DataFrame:
     """
     Shows the tables of a lakehouse and their respective properties. Option to include additional properties relevant to Direct Lake guardrails.
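
As elsewhere in this release, flags with concrete defaults drop the misleading Optional[...] wrapper. A hedged example against the attached lakehouse, where extended and count_rows add guardrail-related columns and per-table row counts as the docstring describes:

    from sempy_labs.lakehouse import get_lakehouse_tables

    df = get_lakehouse_tables(extended=True, count_rows=True)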