semantic-link-labs 0.7.3__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (75)
  1. {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.8.0.dist-info}/METADATA +19 -4
  2. {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.8.0.dist-info}/RECORD +75 -50
  3. {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.8.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +109 -31
  5. sempy_labs/_bpa_translation/{_translations_am-ET.po → _model/_translations_am-ET.po} +22 -0
  6. sempy_labs/_bpa_translation/{_translations_ar-AE.po → _model/_translations_ar-AE.po} +24 -0
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +938 -0
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +934 -0
  9. sempy_labs/_bpa_translation/{_translations_cs-CZ.po → _model/_translations_cs-CZ.po} +179 -157
  10. sempy_labs/_bpa_translation/{_translations_da-DK.po → _model/_translations_da-DK.po} +24 -0
  11. sempy_labs/_bpa_translation/{_translations_de-DE.po → _model/_translations_de-DE.po} +77 -52
  12. sempy_labs/_bpa_translation/{_translations_el-GR.po → _model/_translations_el-GR.po} +25 -0
  13. sempy_labs/_bpa_translation/{_translations_es-ES.po → _model/_translations_es-ES.po} +67 -43
  14. sempy_labs/_bpa_translation/{_translations_fa-IR.po → _model/_translations_fa-IR.po} +24 -0
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +915 -0
  16. sempy_labs/_bpa_translation/{_translations_fr-FR.po → _model/_translations_fr-FR.po} +83 -57
  17. sempy_labs/_bpa_translation/{_translations_ga-IE.po → _model/_translations_ga-IE.po} +25 -0
  18. sempy_labs/_bpa_translation/{_translations_he-IL.po → _model/_translations_he-IL.po} +23 -0
  19. sempy_labs/_bpa_translation/{_translations_hi-IN.po → _model/_translations_hi-IN.po} +24 -0
  20. sempy_labs/_bpa_translation/{_translations_hu-HU.po → _model/_translations_hu-HU.po} +25 -0
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +918 -0
  22. sempy_labs/_bpa_translation/{_translations_is-IS.po → _model/_translations_is-IS.po} +25 -0
  23. sempy_labs/_bpa_translation/{_translations_it-IT.po → _model/_translations_it-IT.po} +25 -0
  24. sempy_labs/_bpa_translation/{_translations_ja-JP.po → _model/_translations_ja-JP.po} +21 -0
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +823 -0
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +937 -0
  27. sempy_labs/_bpa_translation/{_translations_nl-NL.po → _model/_translations_nl-NL.po} +80 -56
  28. sempy_labs/_bpa_translation/{_translations_pl-PL.po → _model/_translations_pl-PL.po} +101 -76
  29. sempy_labs/_bpa_translation/{_translations_pt-BR.po → _model/_translations_pt-BR.po} +25 -0
  30. sempy_labs/_bpa_translation/{_translations_pt-PT.po → _model/_translations_pt-PT.po} +25 -0
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +939 -0
  32. sempy_labs/_bpa_translation/{_translations_ru-RU.po → _model/_translations_ru-RU.po} +25 -0
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +925 -0
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +922 -0
  35. sempy_labs/_bpa_translation/{_translations_ta-IN.po → _model/_translations_ta-IN.po} +26 -0
  36. sempy_labs/_bpa_translation/{_translations_te-IN.po → _model/_translations_te-IN.po} +24 -0
  37. sempy_labs/_bpa_translation/{_translations_th-TH.po → _model/_translations_th-TH.po} +24 -0
  38. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +925 -0
  39. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +933 -0
  40. sempy_labs/_bpa_translation/{_translations_zh-CN.po → _model/_translations_zh-CN.po} +116 -97
  41. sempy_labs/_bpa_translation/{_translations_zu-ZA.po → _model/_translations_zu-ZA.po} +25 -0
  42. sempy_labs/_capacities.py +577 -0
  43. sempy_labs/_capacity_migration.py +624 -0
  44. sempy_labs/_clear_cache.py +8 -8
  45. sempy_labs/_connections.py +140 -0
  46. sempy_labs/_environments.py +156 -0
  47. sempy_labs/_git.py +20 -21
  48. sempy_labs/_helper_functions.py +151 -10
  49. sempy_labs/_icons.py +62 -0
  50. sempy_labs/_list_functions.py +232 -887
  51. sempy_labs/_model_bpa.py +8 -32
  52. sempy_labs/_notebooks.py +143 -0
  53. sempy_labs/_query_scale_out.py +30 -8
  54. sempy_labs/_spark.py +460 -0
  55. sempy_labs/_sql.py +88 -19
  56. sempy_labs/_translations.py +3 -0
  57. sempy_labs/_vertipaq.py +162 -99
  58. sempy_labs/_workspaces.py +294 -0
  59. sempy_labs/admin/__init__.py +53 -0
  60. sempy_labs/admin/_basic_functions.py +806 -0
  61. sempy_labs/admin/_domains.py +411 -0
  62. sempy_labs/directlake/_directlake_schema_sync.py +1 -2
  63. sempy_labs/directlake/_generate_shared_expression.py +11 -14
  64. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +14 -24
  65. sempy_labs/report/__init__.py +9 -6
  66. sempy_labs/report/_report_bpa.py +359 -0
  67. sempy_labs/report/_report_bpa_rules.py +113 -0
  68. sempy_labs/report/_report_helper.py +254 -0
  69. sempy_labs/report/_report_list_functions.py +95 -0
  70. sempy_labs/report/_report_rebind.py +0 -4
  71. sempy_labs/report/_reportwrapper.py +2039 -0
  72. sempy_labs/tom/_model.py +83 -5
  73. {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.8.0.dist-info}/LICENSE +0 -0
  74. {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.8.0.dist-info}/top_level.txt +0 -0
  75. /sempy_labs/_bpa_translation/{_translations_sv-SE.po → _model/_translations_sv-SE.po} +0 -0
@@ -0,0 +1,411 @@
1
+ import sempy.fabric as fabric
2
+ from typing import Optional, List
3
+ import sempy_labs._icons as icons
4
+ from sempy_labs._helper_functions import lro
5
+ from sempy.fabric.exceptions import FabricHTTPException
6
+ import pandas as pd
7
+ from uuid import UUID
8
+
9
+
10
def resolve_domain_id(domain_name: str) -> UUID:
    """
    Obtains the domain Id for a given domain name.

    Parameters
    ----------
    domain_name : str
        The domain name

    Returns
    -------
    UUID
        The domain Id.
    """

    # Look the name up in the full domain listing and fail fast when absent.
    domains = list_domains()
    matches = domains[domains["Domain Name"] == domain_name]
    if matches.empty:
        raise ValueError(f"{icons.red_dot} '{domain_name}' is not a valid domain name.")

    return matches["Domain ID"].iloc[0]
31
+
32
+
33
def list_domains(non_empty_only: Optional[bool] = False) -> pd.DataFrame:
    """
    Shows a list of domains.

    Parameters
    ----------
    non_empty_only : bool, default=False
        When True, only return domains that have at least one workspace containing an item.
        Defaults to False.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of the domains.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/list-domains?tabs=HTTP

    columns = [
        "Domain ID",
        "Domain Name",
        "Description",
        "Parent Domain ID",
        "Contributors Scope",
    ]

    client = fabric.FabricRestClient()
    url = "/v1/admin/domains"
    if non_empty_only:
        url = f"{url}?nonEmptyOnly=True"
    response = client.get(url)

    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Collect all rows first and build the DataFrame once; the original
    # pd.concat-per-row loop was quadratic in the number of domains.
    rows = [
        {
            "Domain ID": v.get("id"),
            "Domain Name": v.get("displayName"),
            "Description": v.get("description"),
            "Parent Domain ID": v.get("parentDomainId"),
            "Contributors Scope": v.get("contributorsScope"),
        }
        for v in response.json().get("domains", [])
    ]

    # Passing columns explicitly keeps the empty-result schema identical.
    return pd.DataFrame(rows, columns=columns)
81
+
82
+
83
def list_domain_workspaces(domain_name: str) -> pd.DataFrame:
    """
    Shows a list of workspaces within the domain.

    Parameters
    ----------
    domain_name : str
        The domain name.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of workspaces within the domain.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/list-domain-workspaces?tabs=HTTP

    domain_id = resolve_domain_id(domain_name)

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/admin/domains/{domain_id}/workspaces")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Collect all rows first and build the DataFrame once; the original
    # pd.concat-per-row loop was quadratic in the number of workspaces.
    rows = [
        {
            "Workspace ID": v.get("id"),
            "Workspace Name": v.get("displayName"),
        }
        for v in response.json().get("value", [])
    ]

    # Passing columns explicitly keeps the empty-result schema identical.
    return pd.DataFrame(rows, columns=["Workspace ID", "Workspace Name"])
118
+
119
+
120
def create_domain(
    domain_name: str,
    description: Optional[str] = None,
    parent_domain_name: Optional[str] = None,
):
    """
    Creates a new domain.

    Parameters
    ----------
    domain_name : str
        The domain name.
    description : str, default=None
        The domain description.
    parent_domain_name : str, default=None
        The parent domain name.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/create-domain?tabs=HTTP

    # Optional fields are only sent when the caller supplied them.
    payload = {"displayName": domain_name}
    if description is not None:
        payload["description"] = description
    if parent_domain_name is not None:
        # Resolving raises ValueError before any request is made if the
        # parent name is unknown.
        payload["parentDomainId"] = resolve_domain_id(parent_domain_name)

    client = fabric.FabricRestClient()
    response = client.post("/v1/admin/domains", json=payload)

    # The create endpoint signals success with 201 Created.
    if response.status_code != 201:
        raise FabricHTTPException(response)

    print(f"{icons.green_dot} The '{domain_name}' domain has been created.")
157
+
158
+
159
def delete_domain(domain_name: str):
    """
    Deletes a domain.

    Parameters
    ----------
    domain_name : str
        The domain name.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/delete-domain?tabs=HTTP

    # Translate the friendly name into the id the REST endpoint expects.
    domain_id = resolve_domain_id(domain_name)

    response = fabric.FabricRestClient().delete(f"/v1/admin/domains/{domain_id}")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    print(f"{icons.green_dot} The '{domain_name}' domain has been deleted.")
180
+
181
+
182
def update_domain(
    domain_name: str,
    description: Optional[str] = None,
    contributors_scope: Optional[str] = None,
):
    """
    Updates a domain's properties.

    Parameters
    ----------
    domain_name : str
        The domain name.
    description : str, default=None
        The domain description.
    contributors_scope : str, default=None
        The domain `contributor scope <https://learn.microsoft.com/rest/api/fabric/admin/domains/update-domain?tabs=HTTP#contributorsscopetype>`_.

    Raises
    ------
    ValueError
        If ``contributors_scope`` is provided but is not a valid scope value.
    FabricHTTPException
        If the REST call does not succeed.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/update-domain?tabs=HTTP

    contributors_scopes = ["AdminsOnly", "AllTenant", "SpecificUsersAndGroups"]

    # Bug fix: only validate when a scope was actually supplied. The original
    # validated unconditionally, so the documented default of None always
    # raised ValueError.
    if contributors_scope is not None and contributors_scope not in contributors_scopes:
        raise ValueError(
            f"{icons.red_dot} Invalid contributors scope. Valid options: {contributors_scopes}."
        )

    domain_id = resolve_domain_id(domain_name)

    payload = {}
    payload["displayName"] = domain_name

    if description is not None:
        payload["description"] = description
    if contributors_scope is not None:
        payload["contributorsScope"] = contributors_scope

    client = fabric.FabricRestClient()
    response = client.patch(f"/v1/admin/domains/{domain_id}", json=payload)

    # Bug fix: the original compared the response *object* to 200
    # (`response != 200`), which is always True, so even successful updates
    # raised FabricHTTPException.
    if response.status_code != 200:
        raise FabricHTTPException(response)

    print(f"{icons.green_dot} The '{domain_name}' domain has been updated.")
226
+
227
+
228
def assign_domain_workspaces_by_capacities(
    domain_name: str, capacity_names: str | List[str]
):
    """
    Assigns all workspaces that reside on the specified capacities to the specified domain.

    Parameters
    ----------
    domain_name : str
        The domain name.
    capacity_names : str | List[str]
        The capacity names.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/assign-domain-workspaces-by-capacities?tabs=HTTP

    from sempy_labs.admin import list_capacities

    domain_id = resolve_domain_id(domain_name)

    # Accept a single name or a list of names.
    if isinstance(capacity_names, str):
        capacity_names = [capacity_names]

    capacities = list_capacities()

    # Reject any requested capacity that does not exist in the tenant.
    known = capacities["Display Name"].values
    invalid_capacities = [name for name in capacity_names if name not in known]

    if len(invalid_capacities) == 1:
        raise ValueError(
            f"{icons.red_dot} The {invalid_capacities} capacity is not valid."
        )
    elif len(invalid_capacities) > 1:
        raise ValueError(
            f"{icons.red_dot} The {invalid_capacities} capacities are not valid."
        )

    # The endpoint expects upper-cased capacity ids.
    selected = capacities[capacities["Display Name"].isin(capacity_names)]
    payload = {"capacitiesIds": list(selected["Id"].str.upper())}

    client = fabric.FabricRestClient()
    response = client.post(
        f"/v1/admin/domains/{domain_id}/assignWorkspacesByCapacities",
        json=payload,
    )

    # Assignment is a long-running operation; wait for it to finish.
    lro(client, response)

    print(
        f"{icons.green_dot} The workspaces in the {capacity_names} capacities have been assigned to the '{domain_name}' domain."
    )
284
+
285
+
286
def assign_domain_workspaces(domain_name: str, workspace_names: str | List[str]):
    """
    Assigns workspaces to the specified domain by workspace.

    Parameters
    ----------
    domain_name : str
        The domain name.
    workspace_names : str | List[str]
        The Fabric workspace(s).
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/assign-domain-workspaces-by-ids?tabs=HTTP

    domain_id = resolve_domain_id(domain_name=domain_name)

    # Accept a single name or a list of names.
    if isinstance(workspace_names, str):
        workspace_names = [workspace_names]

    workspaces = fabric.list_workspaces()

    # Reject any requested workspace that does not exist.
    known = workspaces["Name"].values
    invalid_workspaces = [name for name in workspace_names if name not in known]

    if len(invalid_workspaces) == 1:
        raise ValueError(
            f"{icons.red_dot} The {invalid_workspaces} workspace is not valid."
        )
    elif len(invalid_workspaces) > 1:
        raise ValueError(
            f"{icons.red_dot} The {invalid_workspaces} workspaces are not valid."
        )

    # Map the validated names to their workspace ids for the payload.
    selected = workspaces[workspaces["Name"].isin(workspace_names)]
    payload = {"workspacesIds": list(selected["Id"])}

    client = fabric.FabricRestClient()
    response = client.post(
        f"/v1/admin/domains/{domain_id}/assignWorkspaces",
        json=payload,
    )

    # Assignment is a long-running operation; wait for it to finish.
    lro(client, response)

    print(
        f"{icons.green_dot} The {workspace_names} workspaces have been assigned to the '{domain_name}' domain."
    )
337
+
338
+
339
def unassign_all_domain_workspaces(domain_name: str):
    """
    Unassigns all workspaces from the specified domain.

    Parameters
    ----------
    domain_name : str
        The domain name.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/unassign-all-domain-workspaces?tabs=HTTP

    # Translate the friendly name into the id the REST endpoint expects.
    domain_id = resolve_domain_id(domain_name=domain_name)

    response = fabric.FabricRestClient().post(
        f"/v1/admin/domains/{domain_id}/unassignAllWorkspaces"
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)
    print(
        f"{icons.green_dot} All workspaces assigned to the '{domain_name}' domain have been unassigned."
    )
361
+
362
+
363
def unassign_domain_workspaces(domain_name: str, workspace_names: str | List[str]):
    """
    Unassigns workspaces from the specified domain by workspace.

    Parameters
    ----------
    domain_name : str
        The domain name.
    workspace_names : str | List[str]
        The Fabric workspace(s).
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/domains/unassign-domain-workspaces-by-ids?tabs=HTTP

    domain_id = resolve_domain_id(domain_name=domain_name)

    # Accept a single name or a list of names.
    if isinstance(workspace_names, str):
        workspace_names = [workspace_names]

    workspaces = fabric.list_workspaces()

    # Reject any requested workspace that does not exist.
    known = workspaces["Name"].values
    invalid_workspaces = [name for name in workspace_names if name not in known]

    if len(invalid_workspaces) == 1:
        raise ValueError(
            f"{icons.red_dot} The {invalid_workspaces} workspace is not valid."
        )
    elif len(invalid_workspaces) > 1:
        raise ValueError(
            f"{icons.red_dot} The {invalid_workspaces} workspaces are not valid."
        )

    # Map the validated names to their workspace ids for the payload.
    selected = workspaces[workspaces["Name"].isin(workspace_names)]
    payload = {"workspacesIds": list(selected["Id"])}

    response = fabric.FabricRestClient().post(
        f"/v1/admin/domains/{domain_id}/unassignWorkspaces", json=payload
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)
    print(
        f"{icons.green_dot} The {workspace_names} workspaces assigned to the '{domain_name}' domain have been unassigned."
    )
@@ -32,7 +32,6 @@ def direct_lake_schema_sync(
32
32
 
33
33
  sempy.fabric._client._utils._init_analysis_services()
34
34
  import Microsoft.AnalysisServices.Tabular as TOM
35
- import System
36
35
 
37
36
  if "lakehouse" in kwargs:
38
37
  print(
@@ -99,7 +98,7 @@ def direct_lake_schema_sync(
99
98
  table_name=table_name,
100
99
  column_name=lakeCName,
101
100
  source_column=lakeCName,
102
- data_type=System.Enum.Parse(TOM.DataType, dt),
101
+ data_type=dt,
103
102
  )
104
103
  print(
105
104
  f"{icons.green_dot} The '{lakeCName}' column in the '{lakeTName}' lakehouse table was added to the '{dataset}' semantic model within the '{workspace}' workspace."
@@ -58,14 +58,15 @@ def generate_shared_expression(
58
58
  if response.status_code != 200:
59
59
  raise FabricHTTPException(response)
60
60
 
61
+ prop = response.json().get("properties")
62
+
61
63
  if item_type == "Lakehouse":
62
- prop = response.json()["properties"]["sqlEndpointProperties"]
63
- sqlEPCS = prop["connectionString"]
64
- sqlepid = prop["id"]
65
- provStatus = prop["provisioningStatus"]
64
+ sqlprop = prop.get("sqlEndpointProperties")
65
+ sqlEPCS = sqlprop.get("connectionString")
66
+ sqlepid = sqlprop.get("id")
67
+ provStatus = sqlprop.get("provisioningStatus")
66
68
  elif item_type == "Warehouse":
67
- prop = response.json()["properties"]
68
- sqlEPCS = prop["connectionString"]
69
+ sqlEPCS = prop.get("connectionString")
69
70
  sqlepid = item_id
70
71
  provStatus = None
71
72
 
@@ -74,12 +75,8 @@ def generate_shared_expression(
74
75
  f"{icons.red_dot} The SQL Endpoint for the '{item_name}' lakehouse within the '{workspace}' workspace has not yet been provisioned. Please wait until it has been provisioned."
75
76
  )
76
77
 
77
- sh = (
78
- 'let\n\tdatabase = Sql.Database("'
79
- + sqlEPCS
80
- + '", "'
81
- + sqlepid
82
- + '")\nin\n\tdatabase'
83
- )
78
+ start_expr = "let\n\tdatabase = "
79
+ end_expr = "\nin\n\tdatabase"
80
+ mid_expr = f'Sql.Database("{sqlEPCS}", "{sqlepid}")'
84
81
 
85
- return sh
82
+ return f"{start_expr}{mid_expr}{end_expr}"
@@ -32,10 +32,6 @@ def update_direct_lake_model_lakehouse_connection(
32
32
  The Fabric workspace used by the lakehouse.
33
33
  Defaults to None which resolves to the workspace of the attached lakehouse
34
34
  or if no lakehouse attached, resolves to the workspace of the notebook.
35
-
36
- Returns
37
- -------
38
-
39
35
  """
40
36
 
41
37
  workspace = fabric.resolve_workspace_name(workspace)
@@ -57,25 +53,19 @@ def update_direct_lake_model_lakehouse_connection(
57
53
  f"Therefore it cannot be used to support the '{dataset}' semantic model within the '{workspace}' workspace."
58
54
  )
59
55
 
60
- dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
61
- dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
56
+ shEx = get_shared_expression(lakehouse, lakehouse_workspace)
62
57
 
63
- if len(dfP_filt) == 0:
64
- raise ValueError(
65
- f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
66
- )
67
- else:
68
- with connect_semantic_model(
69
- dataset=dataset, readonly=False, workspace=workspace
70
- ) as tom:
58
+ with connect_semantic_model(
59
+ dataset=dataset, readonly=False, workspace=workspace
60
+ ) as tom:
61
+
62
+ if not tom.is_direct_lake():
63
+ raise ValueError(
64
+ f"{icons.red_dot} The '{dataset}' semantic model is not in Direct Lake. This function is only applicable to Direct Lake semantic models."
65
+ )
66
+
67
+ tom.model.Expressions["DatabaseQuery"].Expression = shEx
71
68
 
72
- shEx = get_shared_expression(lakehouse, lakehouse_workspace)
73
- try:
74
- tom.model.Expressions["DatabaseQuery"].Expression = shEx
75
- print(
76
- f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
77
- )
78
- except Exception as e:
79
- raise ValueError(
80
- f"{icons.red_dot} The expression in the '{dataset}' semantic model was not updated."
81
- ) from e
69
+ print(
70
+ f"{icons.green_dot} The expression in the '{dataset}' semantic model has been updated to point to the '{lakehouse}' lakehouse in the '{lakehouse_workspace}' workspace."
71
+ )
@@ -1,3 +1,7 @@
1
+ from sempy_labs.report._reportwrapper import (
2
+ ReportWrapper,
3
+ )
4
+
1
5
  from sempy_labs.report._generate_report import (
2
6
  create_report_from_reportjson,
3
7
  get_report_definition,
@@ -10,15 +14,14 @@ from sempy_labs.report._report_functions import (
10
14
  export_report,
11
15
  clone_report,
12
16
  launch_report,
13
- # list_report_pages,
14
- # list_report_visuals,
15
- # list_report_bookmarks,
16
17
  # translate_report_titles
17
18
  )
18
19
  from sempy_labs.report._report_rebind import (
19
20
  report_rebind,
20
21
  report_rebind_all,
21
22
  )
23
+ from sempy_labs.report._report_bpa_rules import report_bpa_rules
24
+ from sempy_labs.report._report_bpa import run_report_bpa
22
25
 
23
26
  __all__ = [
24
27
  "create_report_from_reportjson",
@@ -28,12 +31,12 @@ __all__ = [
28
31
  "export_report",
29
32
  "clone_report",
30
33
  "launch_report",
31
- # list_report_pages,
32
- # list_report_visuals,
33
- # list_report_bookmarks,
34
34
  # translate_report_titles,
35
35
  "report_rebind",
36
36
  "report_rebind_all",
37
37
  "get_report_definition",
38
38
  "create_model_bpa_report",
39
+ "ReportWrapper",
40
+ "report_bpa_rules",
41
+ "run_report_bpa",
39
42
  ]