semantic-link-labs 0.9.2__py3-none-any.whl → 0.9.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.
Files changed (54)
  1. {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/METADATA +10 -6
  2. {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/RECORD +54 -44
  3. {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +27 -1
  5. sempy_labs/_ai.py +8 -5
  6. sempy_labs/_capacity_migration.py +3 -2
  7. sempy_labs/_connections.py +45 -9
  8. sempy_labs/_dax.py +17 -3
  9. sempy_labs/_delta_analyzer.py +308 -138
  10. sempy_labs/_eventhouses.py +70 -1
  11. sempy_labs/_gateways.py +56 -8
  12. sempy_labs/_generate_semantic_model.py +30 -9
  13. sempy_labs/_helper_functions.py +84 -9
  14. sempy_labs/_job_scheduler.py +226 -2
  15. sempy_labs/_list_functions.py +42 -19
  16. sempy_labs/_ml_experiments.py +1 -1
  17. sempy_labs/_model_bpa.py +17 -2
  18. sempy_labs/_model_bpa_rules.py +20 -8
  19. sempy_labs/_semantic_models.py +117 -0
  20. sempy_labs/_sql.py +73 -6
  21. sempy_labs/_sqldatabase.py +227 -0
  22. sempy_labs/_translations.py +2 -2
  23. sempy_labs/_vertipaq.py +3 -3
  24. sempy_labs/_warehouses.py +1 -1
  25. sempy_labs/admin/__init__.py +49 -8
  26. sempy_labs/admin/_activities.py +166 -0
  27. sempy_labs/admin/_apps.py +143 -0
  28. sempy_labs/admin/_basic_functions.py +32 -652
  29. sempy_labs/admin/_capacities.py +250 -0
  30. sempy_labs/admin/_datasets.py +184 -0
  31. sempy_labs/admin/_domains.py +1 -3
  32. sempy_labs/admin/_items.py +3 -1
  33. sempy_labs/admin/_reports.py +165 -0
  34. sempy_labs/admin/_scanner.py +53 -49
  35. sempy_labs/admin/_shared.py +74 -0
  36. sempy_labs/admin/_tenant.py +489 -0
  37. sempy_labs/directlake/_dl_helper.py +0 -1
  38. sempy_labs/directlake/_update_directlake_partition_entity.py +6 -0
  39. sempy_labs/graph/_teams.py +1 -1
  40. sempy_labs/graph/_users.py +9 -1
  41. sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
  42. sempy_labs/lakehouse/_get_lakehouse_tables.py +2 -2
  43. sempy_labs/lakehouse/_lakehouse.py +3 -3
  44. sempy_labs/lakehouse/_shortcuts.py +29 -16
  45. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +2 -2
  46. sempy_labs/migration/_refresh_calc_tables.py +2 -2
  47. sempy_labs/report/__init__.py +3 -1
  48. sempy_labs/report/_download_report.py +4 -1
  49. sempy_labs/report/_export_report.py +272 -0
  50. sempy_labs/report/_report_functions.py +11 -263
  51. sempy_labs/report/_report_rebind.py +1 -1
  52. sempy_labs/tom/_model.py +281 -29
  53. {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/LICENSE +0 -0
  54. {semantic_link_labs-0.9.2.dist-info → semantic_link_labs-0.9.4.dist-info}/top_level.txt +0 -0
sempy_labs/admin/_tenant.py (new file)
@@ -0,0 +1,489 @@
+ from sempy_labs._helper_functions import (
+     _update_dataframe_datatypes,
+     _base_api,
+     _create_dataframe,
+ )
+ from sempy._utils._log import log
+ import pandas as pd
+ from uuid import UUID
+ from sempy_labs.admin._capacities import _resolve_capacity_name_and_id
+ import sempy_labs._icons as icons
+ from typing import Optional, List
+
+
+ @log
+ def list_tenant_settings() -> pd.DataFrame:
+     """
+     Lists all tenant settings.
+
+     This is a wrapper function for the following API: `Tenants - List Tenant Settings <https://learn.microsoft.com/rest/api/fabric/admin/tenants/list-tenant-settings>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the tenant settings.
+     """
+
+     columns = {
+         "Setting Name": "string",
+         "Title": "string",
+         "Enabled": "bool",
+         "Can Specify Security Groups": "bool",
+         "Tenant Setting Group": "string",
+         "Enabled Security Groups": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     response = _base_api(request="/v1/admin/tenantsettings", client="fabric_sp")
+
+     for i in response.json().get("value", []):
+         new_data = {
+             "Setting Name": i.get("settingName"),
+             "Title": i.get("title"),
+             "Enabled": i.get("enabled"),
+             "Can Specify Security Groups": i.get("canSpecifySecurityGroups"),
+             "Tenant Setting Group": i.get("tenantSettingGroup"),
+             "Enabled Security Groups": [i.get("enabledSecurityGroups", [])],
+         }
+         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+     return df
+
+
+ @log
+ def list_capacity_tenant_settings_overrides(
+     capacity: Optional[str | UUID] = None,
+     return_dataframe: bool = True,
+ ) -> pd.DataFrame | dict:
+     """
+     Returns list of tenant setting overrides that override at the capacities.
+
+     This is a wrapper function for the following API: `Tenants - List Capacities Tenant Settings Overrides <https://learn.microsoft.com/rest/api/fabric/admin/tenants/list-capacities-tenant-settings-overrides>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     capacity : str | uuid.UUID, default=None
+         The capacity name or ID.
+         Defaults to None which resolves to showing all capacities.
+     return_dataframe : bool, default=True
+         If True, returns a dataframe. If False, returns a dictionary.
+
+     Returns
+     -------
+     pandas.DataFrame | dict
+         A pandas dataframe showing a list of tenant setting overrides that override at the capacities.
+     """
+
+     columns = {
+         "Capacity Id": "string",
+         "Setting Name": "string",
+         "Setting Title": "string",
+         "Setting Enabled": "bool",
+         "Can Specify Security Groups": "bool",
+         "Enabled Security Groups": "string",
+         "Tenant Setting Group": "string",
+         "Tenant Setting Properties": "string",
+         "Delegate to Workspace": "bool",
+         "Delegated From": "string",
+     }
+
+     if capacity is None:
+         url = "/v1/admin/capacities/delegatedTenantSettingOverrides"
+     else:
+         (_, capacity_id) = _resolve_capacity_name_and_id(capacity=capacity)
+         url = f"/v1/admin/capacities/{capacity_id}/delegatedTenantSettingOverrides"
+     responses = _base_api(
+         request=url,
+         client="fabric_sp",
+         uses_pagination=True,
+     )
+
+     def create_new_data(setting, capacity_id=None):
+         return {
+             "Capacity Id": capacity_id or setting.get("id"),
+             "Setting Name": setting.get("settingName"),
+             "Setting Title": setting.get("title"),
+             "Setting Enabled": setting.get("enabled"),
+             "Can Specify Security Groups": setting.get("canSpecifySecurityGroups"),
+             "Enabled Security Groups": setting.get("enabledSecurityGroups", []),
+             "Tenant Setting Group": setting.get("tenantSettingGroup"),
+             "Tenant Setting Properties": setting.get("properties", []),
+             "Delegate to Workspace": setting.get("delegateToWorkspace"),
+             "Delegated From": setting.get("delegatedFrom"),
+         }
+
+     def process_responses(responses, capacity_id=None, return_dataframe=False):
+         data = []
+         df = _create_dataframe(columns=columns)
+
+         for r in responses:
+             if capacity_id is None:
+                 # If capacity_id is None, we access 'Overrides' -> 'tenantSettings'
+                 for override in r.get("overrides", []):
+                     tenant_settings = override.get("tenantSettings", [])
+                     for setting in tenant_settings:
+                         data.append(
+                             create_new_data(setting)
+                         )  # No capacity_id needed here
+             else:
+                 # If capacity_id is provided, we access 'value' directly for tenantSettings
+                 for setting in r.get("value", []):
+                     data.append(
+                         create_new_data(setting, capacity_id)
+                     )  # Use provided capacity_id
+
+         if return_dataframe:
+             if data:
+                 df = pd.DataFrame(data)
+                 _update_dataframe_datatypes(dataframe=df, column_map=columns)
+             return df
+         else:
+             key = "overrides" if capacity_id is None else "value"
+             continuation_uri = r.get("continuationUri", "")
+             continuation_token = r.get("continuationToken", "")
+
+             return {
+                 key: data,
+                 "continuationUri": continuation_uri,
+                 "continuationToken": continuation_token,
+             }
+
+     # Main logic
+     if capacity is None:
+         return (
+             process_responses(responses, return_dataframe=True)
+             if return_dataframe
+             else process_responses(responses)
+         )
+     else:
+         return (
+             process_responses(responses, capacity_id=capacity_id, return_dataframe=True)
+             if return_dataframe
+             else process_responses(responses, capacity_id=capacity_id)
+         )
+
+
+ @log
+ def list_capacities_delegated_tenant_settings(
+     return_dataframe: bool = True,
+ ) -> pd.DataFrame | dict:
+     """
+     Returns list of tenant setting overrides that override at the capacities.
+
+     NOTE: This function is to be deprecated. Please use the `list_capacity_tenant_settings_overrides` function instead.
+
+     This is a wrapper function for the following API: `Tenants - List Capacities Tenant Settings Overrides <https://learn.microsoft.com/rest/api/fabric/admin/tenants/list-capacities-tenant-settings-overrides>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     return_dataframe : bool, default=True
+         If True, returns a dataframe. If False, returns a dictionary.
+
+     Returns
+     -------
+     pandas.DataFrame | dict
+         A pandas dataframe showing a list of tenant setting overrides that override at the capacities.
+     """
+
+     list_capacity_tenant_settings_overrides(return_dataframe=return_dataframe)
+
+
+ @log
+ def delete_capacity_tenant_setting_override(capacity: str | UUID, tenant_setting: str):
+     """
+     Remove given tenant setting override for given capacity Id.
+
+     This is a wrapper function for the following API: `Tenants - Delete Capacity Tenant Setting Override <https://learn.microsoft.com/rest/api/fabric/admin/tenants/delete-capacity-tenant-setting-override>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     capacity : str | uuid.UUID
+         The capacity name or ID.
+     tenant_setting : str
+         The tenant setting name. Example: "TenantSettingForCapacityDelegatedSwitch"
+     """
+
+     (capacity_name, capacity_id) = _resolve_capacity_name_and_id(capacity=capacity)
+
+     _base_api(
+         request=f"/v1/admin/capacities/{capacity_id}/delegatedTenantSettingOverrides/{tenant_setting}",
+         client="fabric_sp",
+         method="delete",
+     )
+
+     print(
+         f"{icons.green_dot} The '{tenant_setting}' tenant setting has been removed from the '{capacity_name}' capacity."
+     )
+
+
+ @log
+ def update_tenant_setting(
+     tenant_setting: str,
+     enabled: bool,
+     delegate_to_capacity: Optional[bool] = None,
+     delegate_to_domain: Optional[bool] = None,
+     delegate_to_workspace: Optional[bool] = None,
+     enabled_security_groups: Optional[List[dict]] = None,
+     excluded_security_groups: Optional[List[dict]] = None,
+     properties: Optional[List[dict]] = None,
+ ):
+     """
+     Update a given tenant setting.
+
+     This is a wrapper function for the following API: `Tenants - Update Tenant Setting <https://learn.microsoft.com/rest/api/fabric/admin/tenants/update-tenant-setting>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     tenant_setting : str
+         The tenant setting name. Example: "TenantSettingForCapacityDelegatedSwitch"
+     enabled : bool
+         The status of the tenant setting. False - Disabled, True - Enabled.
+     delegate_to_capacity : bool, default=None
+         Indicates whether the tenant setting can be delegated to a capacity admin. False - Capacity admin cannot override the tenant setting. True - Capacity admin can override the tenant setting.
+     delegate_to_domain : bool, default=None
+         Indicates whether the tenant setting can be delegated to a domain admin. False - Domain admin cannot override the tenant setting. True - Domain admin can override the tenant setting.
+     delegate_to_workspace : bool, default=None
+         Indicates whether the tenant setting can be delegated to a workspace admin. False - Workspace admin cannot override the tenant setting. True - Workspace admin can override the tenant setting.
+     enabled_security_groups : List[dict], default=None
+         A list of enabled security groups. Example:
+         [
+             {
+                 "graphId": "f51b705f-a409-4d40-9197-c5d5f349e2f0",
+                 "name": "TestComputeCdsa"
+             }
+         ]
+     excluded_security_groups : List[dict], default=None
+         A list of excluded security groups. Example:
+         [
+             {
+                 "graphId": "f51b705f-a409-4d40-9197-c5d5f349e2f0",
+                 "name": "TestComputeCdsa"
+             }
+         ]
+     properties : List[dict], default=None
+         Tenant setting properties. Example:
+         [
+             {
+                 "name": "CreateP2w",
+                 "value": "true",
+                 "type": "Boolean"
+             }
+         ]
+     """
+
+     payload = {"enabled": enabled}
+
+     if delegate_to_capacity is not None:
+         payload["delegateToCapacity"] = delegate_to_capacity
+     if delegate_to_domain is not None:
+         payload["delegateToDomain"] = delegate_to_domain
+     if delegate_to_workspace is not None:
+         payload["delegateToWorkspace"] = delegate_to_workspace
+     if enabled_security_groups is not None:
+         payload["enabledSecurityGroups"] = enabled_security_groups
+     if excluded_security_groups is not None:
+         payload["excludedSecurityGroups"] = excluded_security_groups
+     if properties is not None:
+         payload["properties"] = properties
+
+     _base_api(
+         request=f"/v1/admin/tenantsettings/{tenant_setting}/update",
+         client="fabric_sp",
+         method="post",
+         payload=payload,
+     )
+
+     print(f"{icons.green_dot} The '{tenant_setting}' tenant setting has been updated.")
+
+
+ @log
+ def update_capacity_tenant_setting_override(
+     capacity: str | UUID,
+     tenant_setting: str,
+     enabled: bool,
+     delegate_to_workspace: Optional[bool] = None,
+     enabled_security_groups: Optional[List[dict]] = None,
+     excluded_security_groups: Optional[List[dict]] = None,
+ ):
+     """
+     Update given tenant setting override for given capacity.
+
+     This is a wrapper function for the following API: `Tenants - Update Capacity Tenant Setting Override <https://learn.microsoft.com/en-us/rest/api/fabric/admin/tenants/update-capacity-tenant-setting-override>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     capacity : str | uuid.UUID
+         The capacity name or ID.
+     tenant_setting : str
+         The tenant setting name. Example: "TenantSettingForCapacityDelegatedSwitch"
+     enabled : bool
+         The status of the tenant setting. False - Disabled, True - Enabled.
+     delegate_to_workspace : bool, default=None
+         Indicates whether the tenant setting can be delegated to a workspace admin. False - Workspace admin cannot override the tenant setting. True - Workspace admin can override the tenant setting.
+     enabled_security_groups : List[dict], default=None
+         A list of enabled security groups. Example:
+         [
+             {
+                 "graphId": "f51b705f-a409-4d40-9197-c5d5f349e2f0",
+                 "name": "TestComputeCdsa"
+             }
+         ]
+     excluded_security_groups : List[dict], default=None
+         A list of excluded security groups. Example:
+         [
+             {
+                 "graphId": "f51b705f-a409-4d40-9197-c5d5f349e2f0",
+                 "name": "TestComputeCdsa"
+             }
+         ]
+     """
+
+     (capacity_name, capacity_id) = _resolve_capacity_name_and_id(capacity=capacity)
+
+     payload = {"enabled": enabled}
+
+     if delegate_to_workspace is not None:
+         payload["delegateToWorkspace"] = delegate_to_workspace
+     if enabled_security_groups is not None:
+         payload["enabledSecurityGroups"] = enabled_security_groups
+     if excluded_security_groups is not None:
+         payload["excludedSecurityGroups"] = excluded_security_groups
+
+     _base_api(
+         request=f"/v1/admin/capacities/{capacity_id}/delegatedTenantSettingOverrides/{tenant_setting}/update",
+         client="fabric_sp",
+         method="post",
+         payload=payload,
+     )
+
+     print(
+         f"{icons.green_dot} The '{tenant_setting}' tenant setting for the '{capacity_name}' capacity has been updated."
+     )
+
+
+ @log
+ def list_workspaces_tenant_settings_overrides() -> pd.DataFrame:
+     """
+     Shows a list of workspace delegation setting overrides. In order to run this function, you must enable the workspace's delegated OneLake settings. To do this, navigate to the workspace, Workspace Settings -> Delegated Settings -> OneLake settings -> Set to 'On'.
+
+     This is a wrapper function for the following API: `Tenants - List Workspaces Tenant Settings Overrides <https://learn.microsoft.com/rest/api/fabric/admin/tenants/list-workspaces-tenant-settings-overrides>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing a list of workspace delegation setting overrides.
+     """
+
+     columns = {
+         "Setting Name": "string",
+         "Title": "string",
+         "Enabled": "bool",
+         "Can Specify Security Groups": "bool",
+         "Enabled Security Groups": "string",
+         "Tenant Setting Group": "string",
+         "Delegated From": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     responses = _base_api(
+         request="/v1/admin/workspaces/delegatedTenantSettingOverrides",
+         client="fabric_sp",
+         uses_pagination=True,
+     )
+
+     for r in responses:
+         for v in r.get("value", []):
+             for setting in v.get("tenantSettings", []):
+                 new_data = {
+                     "Setting Name": setting.get("settingName"),
+                     "Title": setting.get("title"),
+                     "Enabled": setting.get("enabled"),
+                     "Can Specify Security Groups": setting.get(
+                         "canSpecifySecurityGroups"
+                     ),
+                     "Enabled Security Groups": [
+                         setting.get("enabledSecurityGroups", [])
+                     ],
+                     "Tenant Setting Group": setting.get("tenantSettingGroup"),
+                     "Delegated From": setting.get("delegatedFrom"),
+                 }
+
+                 df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+     return df
+
+
+ @log
+ def list_domain_tenant_settings_overrides() -> pd.DataFrame:
+     """
+     Shows a list of domain delegation setting overrides.
+
+     This is a wrapper function for the following API: `Tenants - List Domains Tenant Settings Overrides <https://learn.microsoft.com/rest/api/fabric/admin/tenants/list-domains-tenant-settings-overrides>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing a list of domain delegation setting overrides.
+     """
+
+     columns = {
+         "Setting Name": "string",
+         "Title": "string",
+         "Enabled": "bool",
+         "Can Specify Security Groups": "bool",
+         "Enabled Security Groups": "string",
+         "Tenant Setting Group": "string",
+         "Delegated To Workspace": "bool",
+         "Delegated From": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     responses = _base_api(
+         request="/v1/admin/domains/delegatedTenantSettingOverrides",
+         client="fabric_sp",
+         uses_pagination=True,
+     )
+
+     for r in responses:
+         for v in r.get("value", []):
+             for setting in v.get("tenantSettings", []):
+                 new_data = {
+                     "Setting Name": setting.get("settingName"),
+                     "Title": setting.get("title"),
+                     "Enabled": setting.get("enabled"),
+                     "Can Specify Security Groups": setting.get(
+                         "canSpecifySecurityGroups"
+                     ),
+                     "Enabled Security Groups": [
+                         setting.get("enabledSecurityGroups", [])
+                     ],
+                     "Tenant Setting Group": setting.get("tenantSettingGroup"),
+                     "Delegated To Workspace": setting.get("delegateToWorkspace"),
+                     "Delegated From": setting.get("delegatedFrom"),
+                 }
+
+                 df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+     return df
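
The new sempy_labs.admin._tenant module above appears to be surfaced through the expanded sempy_labs/admin/__init__.py in this release. A minimal usage sketch, assuming the functions are re-exported from sempy_labs.admin and the caller has Fabric tenant admin (or configured service principal) permissions; the capacity name is hypothetical:

import sempy_labs.admin as admin

# List every tenant setting as a dataframe (wraps GET /v1/admin/tenantsettings).
settings_df = admin.list_tenant_settings()

# Enable a setting and allow capacity admins to override it.
admin.update_tenant_setting(
    tenant_setting="TenantSettingForCapacityDelegatedSwitch",
    enabled=True,
    delegate_to_capacity=True,
)

# Remove a capacity-level override of the same setting.
admin.delete_capacity_tenant_setting_override(
    capacity="MyCapacity",  # hypothetical capacity name
    tenant_setting="TenantSettingForCapacityDelegatedSwitch",
)
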
sempy_labs/directlake/_dl_helper.py
@@ -11,7 +11,6 @@ from sempy_labs._helper_functions import (
      _convert_data_type,
      resolve_dataset_name_and_id,
      resolve_workspace_name_and_id,
-     _base_api,
  )


sempy_labs/directlake/_update_directlake_partition_entity.py
@@ -77,6 +77,12 @@ def update_direct_lake_partition_entity(
          )

          tom.model.Tables[tName].Partitions[part_name].Source.EntityName = eName
+
+         # Update source lineage tag
+         schema = (
+             tom.model.Tables[tName].Partitions[part_name].Source.SchemaName or "dbo"
+         )
+         tom.model.Tables[tName].SourceLineageTag = f"[{schema}].[{eName}]"
          print(
              f"{icons.green_dot} The '{tName}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been updated to point to the '{eName}' table."
          )
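
The added block keeps the table's SourceLineageTag consistent with the repointed partition source. A small illustration of the tag format the new lines produce (values are hypothetical):

# Hypothetical values, mirroring the f-string added in the hunk above.
schema = None or "dbo"              # SchemaName falls back to "dbo" when unset
entity_name = "FactSales"
source_lineage_tag = f"[{schema}].[{entity_name}]"
print(source_lineage_tag)           # prints: [dbo].[FactSales]
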
sempy_labs/graph/_teams.py
@@ -35,7 +35,7 @@ def list_teams() -> pd.DataFrame:
          "Archived": "bool",
          "Favorite By Me": "bool",
          "Discoverable By Me": "bool",
-         "Member Count": "int",
+         "Member Count": "int_fillna",
      }

      df = _create_dataframe(columns=columns)
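
The column type for "Member Count" changes from "int" to "int_fillna". That mapping is interpreted by _update_dataframe_datatypes in _helper_functions.py, which is not shown in this diff; a plausible reading, stated purely as an assumption, is that missing counts are filled before the integer cast so NaN values no longer break the conversion:

import pandas as pd

df = pd.DataFrame({"Member Count": [3, None, 7]})
# Assumed behavior of an "int_fillna" mapping: fill missing values, then cast to int.
df["Member Count"] = df["Member Count"].fillna(0).astype(int)
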
sempy_labs/graph/_users.py
@@ -125,6 +125,7 @@ def send_mail(
      subject: str,
      to_recipients: str | List[str],
      content: str,
+     content_type: str = "Text",
      cc_recipients: str | List[str] = None,
  ):
      """
@@ -144,10 +145,17 @@
          The email address of the recipients.
      content : str
          The email content.
+     content_type : str, default="Text"
+         The email content type. Options: "Text" or "HTML".
      cc_recipients : str | List[str], default=None
          The email address of the CC recipients.
      """

+     if content_type.lower() == "html":
+         content_type = "HTML"
+     else:
+         content_type = "Text"
+
      user_id = resolve_user_id(user=user)

      if isinstance(to_recipients, str):
@@ -170,7 +178,7 @@
          "message": {
              "subject": subject,
              "body": {
-                 "contentType": "Text",
+                 "contentType": content_type,
                  "content": content,
              },
              "toRecipients": to_email_addresses,
sempy_labs/lakehouse/_get_lakehouse_columns.py
@@ -1,10 +1,10 @@
  import pandas as pd
- from pyspark.sql import SparkSession
  from sempy_labs._helper_functions import (
      format_dax_object_name,
      resolve_workspace_name_and_id,
      resolve_lakehouse_name_and_id,
      _create_dataframe,
+     _create_spark_session,
  )
  from typing import Optional
  from sempy._utils._log import log
@@ -51,7 +51,7 @@ def get_lakehouse_columns(
          lakehouse=lakehouse, workspace=workspace_id
      )

-     spark = SparkSession.builder.getOrCreate()
+     spark = _create_spark_session()

      tables = get_lakehouse_tables(
          lakehouse=lakehouse_id, workspace=workspace_id, extended=False, count_rows=False
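
This file, together with _get_lakehouse_tables.py and _lakehouse.py below, swaps direct SparkSession.builder.getOrCreate() calls for a shared _create_spark_session() helper in _helper_functions.py. The helper's implementation is not part of this diff; a minimal sketch of what such a wrapper usually looks like, offered purely as an assumption:

from pyspark.sql import SparkSession

def _create_spark_session() -> SparkSession:
    # Assumed behavior: centralize session creation in one place so pyspark is
    # imported lazily and every caller reuses the same active session.
    return SparkSession.builder.getOrCreate()
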
sempy_labs/lakehouse/_get_lakehouse_tables.py
@@ -1,6 +1,5 @@
  import sempy.fabric as fabric
  import pandas as pd
- from pyspark.sql import SparkSession
  import pyarrow.parquet as pq
  import datetime
  from sempy_labs._helper_functions import (
@@ -10,6 +9,7 @@ from sempy_labs._helper_functions import (
      save_as_delta_table,
      _base_api,
      _create_dataframe,
+     _create_spark_session,
  )
  from sempy_labs.directlake._guardrails import (
      get_sku_size,
@@ -112,7 +112,7 @@ def get_lakehouse_tables(
      if extended:
          sku_value = get_sku_size(workspace_id)
          guardrail = get_directlake_guardrails_for_sku(sku_value)
-         spark = SparkSession.builder.getOrCreate()
+         spark = _create_spark_session()
          df["Files"] = None
          df["Row Groups"] = None
          df["Table Size"] = None
sempy_labs/lakehouse/_lakehouse.py
@@ -6,6 +6,7 @@ from sempy_labs._helper_functions import (
      _base_api,
      resolve_lakehouse_name_and_id,
      resolve_workspace_name_and_id,
+     _create_spark_session,
  )
  import sempy_labs._icons as icons
  import re
@@ -54,7 +55,6 @@ def optimize_lakehouse_tables(
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """

-     from pyspark.sql import SparkSession
      from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
      from delta import DeltaTable

@@ -69,7 +69,7 @@
      else:
          tables_filt = lakeTablesDelta.copy()

-     spark = SparkSession.builder.getOrCreate()
+     spark = _create_spark_session()

      for _, r in (bar := tqdm(tables_filt.iterrows())):
          tableName = r["Table Name"]
@@ -122,7 +122,7 @@ def vacuum_lakehouse_tables(
      else:
          tables_filt = lakeTablesDelta.copy()

-     spark = SparkSession.builder.getOrCreate()
+     spark = _create_spark_session()
      spark.conf.set("spark.databricks.delta.vacuum.parallelDelete.enabled", "true")

      for _, r in (bar := tqdm(tables_filt.iterrows())):
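
To close, a short usage sketch of the two maintenance helpers touched by the last hunks, assuming they are re-exported from sempy_labs.lakehouse, a lakehouse is attached to the notebook, and the table names are hypothetical:

from sempy_labs.lakehouse import optimize_lakehouse_tables, vacuum_lakehouse_tables

# Compact the Delta files of selected tables in the attached lakehouse.
optimize_lakehouse_tables(tables=["FactSales", "DimDate"])

# Vacuum the same tables; after this release the Spark session comes from the
# shared _create_spark_session() helper, with parallel delete enabled as shown above.
vacuum_lakehouse_tables(tables=["FactSales", "DimDate"])
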