semantic-link-labs 0.11.2__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (93)
  1. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/METADATA +7 -6
  2. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/RECORD +90 -84
  3. sempy_labs/__init__.py +18 -18
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_authentication.py +81 -32
  6. sempy_labs/_capacities.py +2 -2
  7. sempy_labs/_capacity_migration.py +4 -4
  8. sempy_labs/_clear_cache.py +1 -1
  9. sempy_labs/_connections.py +107 -70
  10. sempy_labs/_dashboards.py +6 -2
  11. sempy_labs/_data_pipelines.py +1 -1
  12. sempy_labs/_dataflows.py +1 -1
  13. sempy_labs/_dax.py +3 -3
  14. sempy_labs/_delta_analyzer.py +4 -4
  15. sempy_labs/_delta_analyzer_history.py +1 -1
  16. sempy_labs/_deployment_pipelines.py +1 -1
  17. sempy_labs/_environments.py +1 -1
  18. sempy_labs/_eventhouses.py +9 -3
  19. sempy_labs/_eventstreams.py +1 -1
  20. sempy_labs/_external_data_shares.py +56 -2
  21. sempy_labs/_gateways.py +14 -7
  22. sempy_labs/_generate_semantic_model.py +7 -12
  23. sempy_labs/_git.py +1 -1
  24. sempy_labs/_graphQL.py +1 -1
  25. sempy_labs/_helper_functions.py +293 -22
  26. sempy_labs/_job_scheduler.py +12 -1
  27. sempy_labs/_kql_databases.py +1 -1
  28. sempy_labs/_kql_querysets.py +10 -2
  29. sempy_labs/_kusto.py +2 -2
  30. sempy_labs/_labels.py +126 -0
  31. sempy_labs/_list_functions.py +2 -2
  32. sempy_labs/_managed_private_endpoints.py +1 -1
  33. sempy_labs/_mirrored_databases.py +40 -16
  34. sempy_labs/_mirrored_warehouses.py +1 -1
  35. sempy_labs/_ml_experiments.py +1 -1
  36. sempy_labs/_model_bpa.py +6 -6
  37. sempy_labs/_model_bpa_bulk.py +3 -3
  38. sempy_labs/_model_dependencies.py +1 -1
  39. sempy_labs/_mounted_data_factories.py +3 -3
  40. sempy_labs/_notebooks.py +153 -3
  41. sempy_labs/_query_scale_out.py +2 -2
  42. sempy_labs/_refresh_semantic_model.py +1 -1
  43. sempy_labs/_semantic_models.py +15 -3
  44. sempy_labs/_spark.py +1 -1
  45. sempy_labs/_sql.py +3 -3
  46. sempy_labs/_sql_endpoints.py +5 -3
  47. sempy_labs/_sqldatabase.py +5 -1
  48. sempy_labs/_tags.py +3 -1
  49. sempy_labs/_translations.py +7 -360
  50. sempy_labs/_user_delegation_key.py +2 -2
  51. sempy_labs/_utils.py +27 -0
  52. sempy_labs/_vertipaq.py +3 -3
  53. sempy_labs/_vpax.py +1 -1
  54. sempy_labs/_warehouses.py +5 -0
  55. sempy_labs/_workloads.py +1 -1
  56. sempy_labs/_workspace_identity.py +1 -1
  57. sempy_labs/_workspaces.py +145 -11
  58. sempy_labs/admin/__init__.py +6 -0
  59. sempy_labs/admin/_capacities.py +34 -11
  60. sempy_labs/admin/_items.py +2 -2
  61. sempy_labs/admin/_tenant_keys.py +89 -0
  62. sempy_labs/directlake/_dl_helper.py +5 -2
  63. sempy_labs/graph/_users.py +3 -5
  64. sempy_labs/lakehouse/__init__.py +4 -0
  65. sempy_labs/lakehouse/_helper.py +18 -9
  66. sempy_labs/lakehouse/_lakehouse.py +18 -9
  67. sempy_labs/lakehouse/_materialized_lake_views.py +76 -0
  68. sempy_labs/lakehouse/_shortcuts.py +8 -2
  69. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +38 -47
  70. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +12 -22
  71. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +7 -11
  72. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -23
  73. sempy_labs/ml_model/__init__.py +23 -0
  74. sempy_labs/ml_model/_functions.py +427 -0
  75. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  76. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  77. sempy_labs/report/_download_report.py +4 -1
  78. sempy_labs/report/_export_report.py +12 -5
  79. sempy_labs/report/_generate_report.py +11 -3
  80. sempy_labs/report/_paginated.py +21 -15
  81. sempy_labs/report/_report_functions.py +19 -11
  82. sempy_labs/report/_report_rebind.py +21 -10
  83. sempy_labs/report/_reportwrapper.py +1 -1
  84. sempy_labs/theme/_org_themes.py +5 -6
  85. sempy_labs/tom/_model.py +13 -19
  86. sempy_labs/variable_library/__init__.py +19 -0
  87. sempy_labs/variable_library/_functions.py +403 -0
  88. sempy_labs/_dax_query_view.py +0 -57
  89. sempy_labs/_ml_models.py +0 -111
  90. sempy_labs/_variable_libraries.py +0 -92
  91. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/WHEEL +0 -0
  92. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/licenses/LICENSE +0 -0
  93. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/top_level.txt +0 -0
sempy_labs/_workloads.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 from typing import Optional
 import sempy_labs._icons as icons
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
sempy_labs/_workspace_identity.py CHANGED
@@ -1,4 +1,4 @@
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
 )
sempy_labs/_workspaces.py CHANGED
@@ -1,7 +1,8 @@
 import pandas as pd
 import sempy_labs._icons as icons
-from typing import Optional
-from ._helper_functions import (
+from typing import Optional, Literal
+from sempy_labs._helper_functions import (
+    resolve_workspace_id,
     resolve_workspace_name_and_id,
     resolve_capacity_id,
     _base_api,
@@ -127,7 +128,7 @@ def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame
        A pandas dataframe the users of a workspace and their properties.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace_id = resolve_workspace_id(workspace)

     columns = {
         "User Name": "string",
@@ -176,6 +177,8 @@ def add_user_to_workspace(

     This is a wrapper function for the following API: `Groups - Add Group User <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     email_address : str
@@ -217,6 +220,7 @@
         request=f"/v1.0/myorg/groups/{workspace_id}/users",
         method="post",
         payload=payload,
+        client="fabric_sp",
     )
     print(
         f"{icons.green_dot} The '{email_address}' user has been added as a{plural} '{role_name}' within the '{workspace_name}' workspace."
@@ -234,6 +238,8 @@ def assign_workspace_to_capacity(

     This is a wrapper function for the following API: `Workspaces - Assign To Capacity <https://learn.microsoft.com/rest/api/fabric/core/workspaces/assign-to-capacity>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     capacity : str | uuid.UUID
@@ -260,6 +266,7 @@
         method="post",
         payload=payload,
         status_codes=[200, 202],
+        client="fabric_sp",
     )
     print(
         f"{icons.green_dot} The '{workspace_name}' workspace has been assigned to the '{capacity}' capacity."
@@ -320,7 +327,7 @@ def list_workspace_role_assignments(
        A pandas dataframe showing the members of a given workspace and their roles.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace_id = resolve_workspace_id(workspace)

     columns = {
         "User Name": "string",
@@ -336,16 +343,23 @@
         client="fabric_sp",
     )

+    rows = []
     for r in responses:
         for i in r.get("value", []):
             principal = i.get("principal", {})
-            new_data = {
-                "User Name": principal.get("displayName"),
-                "Role Name": i.get("role"),
-                "Type": principal.get("type"),
-                "User Email": principal.get("userDetails", {}).get("userPrincipalName"),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+            rows.append(
+                {
+                    "User Name": principal.get("displayName"),
+                    "Role Name": i.get("role"),
+                    "Type": principal.get("type"),
+                    "User Email": principal.get("userDetails", {}).get(
+                        "userPrincipalName"
+                    ),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
 
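The refactor above swaps per-row pd.concat calls for a list of dicts and a single DataFrame constructor. Repeated concat re-copies the accumulated frame on every iteration, so the new form is linear rather than quadratic in the number of rows. A minimal, generic sketch of the pattern (placeholder data):

    import pandas as pd

    # Collect plain dicts first, then build the frame once.
    rows = []
    for name, role in [("alice", "Admin"), ("bob", "Viewer")]:
        rows.append({"User Name": name, "Role Name": role})

    df = pd.DataFrame(rows, columns=["User Name", "Role Name"])
    print(df)
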
@@ -357,6 +371,8 @@ def delete_workspace(workspace: Optional[str | UUID] = None):

     This is a wrapper function for the following API: `Workspaces - Delete Workspace <https://learn.microsoft.com/rest/api/fabric/core/workspaces/delete-workspace>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -372,3 +388,121 @@
     )

     print(f"{icons.green_dot} The '{workspace_name}' workspace has been deleted.")
+
+
+@log
+def get_workspace_network_communication_policy(
+    workspace: Optional[str | UUID] = None,
+) -> pd.DataFrame:
+    """
+    Returns networking communication policy for the specified workspace. This feature is currently in preview.
+
+    This is a wrapper function for the following API: `Workspaces - Get Network Communication Policy <https://learn.microsoft.com/rest/api/fabric/core/workspaces/get-network-communication-policy>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the networking communication policy for the specified workspace.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+
+    columns = {
+        "Inbound Public Access Rules": "string",
+        "Outbound Public Access Rules": "string",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    data = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/networking/communicationPolicy",
+        client="fabric_sp",
+    ).json()
+
+    if data:
+        df = pd.DataFrame(
+            [
+                {
+                    "Inbound Public Access Rules": data.get("inbound", {})
+                    .get("publicAccessRules", {})
+                    .get("defaultAction"),
+                    "Outbound Public Access Rules": data.get("outbound", {})
+                    .get("publicAccessRules", {})
+                    .get("defaultAction"),
+                }
+            ]
+        )
+
+    return df
+
+
+@log
+def set_workspace_network_communication_policy(
+    inbound_policy: Literal["Allow", "Deny"],
+    outbound_policy: Literal["Allow", "Deny"],
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Sets networking communication policy for the specified workspace. This API uses the PUT method and will overwrite all settings. Remaining policy will be set to default value if only partial policy is provided in the request body. Always run Get Network Communication Policy first and provide full policy in the request body. This feature is currently in preview.
+
+    This is a wrapper function for the following API: `Workspaces - Set Network Communication Policy <https://learn.microsoft.com/rest/api/fabric/core/workspaces/set-network-communication-policy>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    inbound_policy : Literal['Allow', 'Deny']
+        The policy for all inbound communications to a workspace.
+    outbound_policy : Literal['Allow', 'Deny']
+        The policy for all outbound communications to a workspace.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    inbound_policy = inbound_policy.capitalize()
+    outbound_policy = outbound_policy.capitalize()
+
+    if inbound_policy not in ["Allow", "Deny"]:
+        raise ValueError(
+            f"{icons.red_dot} The 'inbound_policy' must be either 'Allow' or 'Deny'."
+        )
+    if outbound_policy not in ["Allow", "Deny"]:
+        raise ValueError(
+            f"{icons.red_dot} The 'outbound_policy' must be either 'Allow' or 'Deny'."
+        )
+
+    payload = {
+        "inbound": {
+            "publicAccessRules": {
+                "defaultAction": inbound_policy,
+            }
+        },
+        "outbound": {
+            "publicAccessRules": {
+                "defaultAction": outbound_policy,
+            }
+        },
+    }
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/networking/communicationPolicy",
+        client="fabric_sp",
+        payload=payload,
+        method="put",
+    )
+
+    print(
+        f"{icons.green_dot} The networking communication policy has been updated for the '{workspace_name}' workspace."
+    )
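
A hedged usage sketch of the two new policy functions. The call shapes come straight from the hunk above; the workspace name is a placeholder, and the import path assumes the functions are used from sempy_labs._workspaces, where they are defined (a top-level re-export may also exist but is not shown in this diff):

    from sempy_labs._workspaces import (
        get_workspace_network_communication_policy,
        set_workspace_network_communication_policy,
    )

    # The setter issues a PUT that overwrites the entire policy, so read
    # the current policy first, as the docstring advises.
    current = get_workspace_network_communication_policy(workspace="My Workspace")
    print(current)

    # Deny inbound public access while leaving outbound open.
    set_workspace_network_communication_policy(
        inbound_policy="Deny",
        outbound_policy="Allow",
        workspace="My Workspace",
    )
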
sempy_labs/admin/__init__.py CHANGED
@@ -90,6 +90,10 @@ from ._tags import (
     create_tags,
     delete_tag,
 )
+from ._tenant_keys import (
+    list_tenant_keys,
+    rotate_tenant_key,
+)

 __all__ = [
     "list_items",
@@ -149,4 +153,6 @@ __all__ = [
     "list_tags",
     "create_tags",
     "delete_tag",
+    "list_tenant_keys",
+    "rotate_tenant_key",
 ]
sempy_labs/admin/_capacities.py CHANGED
@@ -3,7 +3,7 @@ from uuid import UUID
 import sempy_labs._icons as icons
 from typing import Optional, Tuple
 from sempy._utils._log import log
-from .._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     _build_url,
     _create_dataframe,
@@ -226,6 +226,7 @@ def get_capacity_state(capacity: Optional[str | UUID] = None):
 @log
 def list_capacities(
     capacity: Optional[str | UUID] = None,
+    include_tenant_key: bool = False,
 ) -> pd.DataFrame:
     """
     Shows the a list of capacities and their properties.
@@ -238,6 +239,8 @@
     ----------
     capacity : str | uuid.UUID, default=None
         The capacity name or ID to filter. If None, all capacities are returned.
+    include_tenant_key : bool, default=False
+        If True, obtains the `tenant key <https://learn.microsoft.com/rest/api/power-bi/admin/get-capacities-as-admin#example-with-expand-on-tenant-key>`_ properties.

     Returns
     -------
@@ -252,27 +255,47 @@
         "Region": "string",
         "State": "string",
         "Admins": "list",
+        "Users": "list",
     }
+    if include_tenant_key:
+        columns.update(
+            {
+                "Tenant Key Id": "string",
+                "Tenant Key Name": "string",
+            }
+        )
     df = _create_dataframe(columns=columns)

-    responses = _base_api(
-        request="/v1.0/myorg/admin/capacities", client="fabric_sp", uses_pagination=True
-    )
+    url = "/v1.0/myorg/admin/capacities"
+    if include_tenant_key:
+        url += "?$expand=tenantKey"
+
+    responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)

     rows = []
     for r in responses:
         for i in r.get("value", []):
-            rows.append(
+            row = {
+                "Capacity Id": i.get("id", "").lower(),
+                "Capacity Name": i.get("displayName"),
+                "Sku": i.get("sku"),
+                "Region": i.get("region"),
+                "State": i.get("state"),
+                "Admins": i.get("admins", []),
+                "Users": i.get("users", []),
+            }
+
+            if include_tenant_key:
+                tenant_key = i.get("tenantKey") or {}
+                row.update(
                 {
-                    "Capacity Id": i.get("id").lower(),
-                    "Capacity Name": i.get("displayName"),
-                    "Sku": i.get("sku"),
-                    "Region": i.get("region"),
-                    "State": i.get("state"),
-                    "Admins": [i.get("admins", [])],
+                        "Tenant Key Id": tenant_key.get("id"),
+                        "Tenant Key Name": tenant_key.get("name"),
                 }
             )

+            rows.append(row)
+
     if rows:
         df = pd.DataFrame(rows, columns=list(columns.keys()))
 
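A hedged usage sketch of the new flag (assuming list_capacities is importable from sempy_labs.admin like the other admin functions in this release):

    from sempy_labs.admin import list_capacities

    # With the flag set, the request adds ?$expand=tenantKey and the frame
    # gains "Tenant Key Id" / "Tenant Key Name" columns alongside the new
    # "Users" column.
    df = list_capacities(include_tenant_key=True)
    print(df[["Capacity Name", "Sku", "Tenant Key Id", "Tenant Key Name"]])
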
sempy_labs/admin/_items.py CHANGED
@@ -228,8 +228,8 @@ def list_item_access_details(
             f"{icons.red_dot} The parameter 'item' and 'type' are mandatory."
         )

-    workspace_name, workspace_id = _resolve_workspace_name_and_id(workspace)
-    item_name, item_id = _resolve_item_name_and_id(
+    (workspace_name, workspace_id) = _resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = _resolve_item_name_and_id(
         item=item, type=type, workspace=workspace_name
     )
sempy_labs/admin/_tenant_keys.py ADDED
@@ -0,0 +1,89 @@
+import pandas as pd
+from sempy_labs._helper_functions import (
+    _create_dataframe,
+    _update_dataframe_datatypes,
+    _base_api,
+)
+from sempy._utils._log import log
+from uuid import UUID
+import sempy_labs._icons as icons
+
+
+@log
+def list_tenant_keys() -> pd.DataFrame:
+    """
+    Returns the encryption keys for the tenant.
+
+    This is a wrapper function for the following API: `Admin - Get Power BI Encryption Keys <https://learn.microsoft.com/rest/api/power-bi/admin/get-power-bi-encryption-keys>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the encryption keys for the tenant.
+    """
+
+    columns = {
+        "Tenant Key Id": "str",
+        "Tenant Key Name": "str",
+        "Key Vault Key Identifier": "str",
+        "Is Default": "bool",
+        "Created At": "datetime",
+        "Updated At": "datetime",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    result = _base_api(
+        request="/v1.0/myorg/admin/tenantKeys", client="fabric_sp"
+    ).json()
+
+    rows = []
+    for i in result.get("value", []):
+        rows.append(
+            {
+                "Tenant Key Id": i.get("id"),
+                "Tenant Key Name": i.get("name"),
+                "Key Vault Key Identifier": i.get("keyVaultKeyIdentifier"),
+                "Is Default": i.get("isDefault"),
+                "Created At": i.get("createdAt"),
+                "Updated At": i.get("updatedAt"),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
+
+
+@log
+def rotate_tenant_key(tenant_key_id: UUID, key_vault_key_identifier: str):
+    """
+    Rotate the encryption key for Power BI workspaces assigned to a capacity.
+
+    This is a wrapper function for the following API: `Admin - Rotate Power BI Encryption Key <https://learn.microsoft.com/rest/api/power-bi/admin/rotate-power-bi-encryption-key>`_.
+
+    Parameters
+    ----------
+    tenant_key_id : uuid.UUID
+        The tenant key ID
+    key_vault_key_identifier : str
+        The URI that uniquely specifies the encryption key in Azure Key Vault
+    """
+
+    payload = {
+        "keyVaultKeyIdentifier": key_vault_key_identifier,
+    }
+
+    _base_api(
+        request=f"/v1.0/myorg/admin/tenantKeys/{tenant_key_id}/Default.Rotate",
+        method="post",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} The encryption key for tenant {tenant_key_id} has been rotated successfully using the '{key_vault_key_identifier}' Key Vault key identifier."
+    )
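
A hedged usage sketch of the two new admin functions (both are re-exported from sempy_labs.admin per the __init__.py hunk earlier; the key ID and Key Vault URI below are placeholders):

    from sempy_labs.admin import list_tenant_keys, rotate_tenant_key

    # Inspect the tenant's encryption keys (requires tenant admin rights).
    keys = list_tenant_keys()
    print(keys)

    # Rotate a key to a new Azure Key Vault key version, using an ID taken
    # from the frame above.
    rotate_tenant_key(
        tenant_key_id="00000000-0000-0000-0000-000000000000",
        key_vault_key_identifier="https://contoso.vault.azure.net/keys/tenant-key/0123456789abcdef",
    )
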
sempy_labs/directlake/_dl_helper.py CHANGED
@@ -145,7 +145,7 @@
     expr = generate_shared_expression(
         item_name=lakehouse, item_type="Lakehouse", workspace=lakehouse_workspace
     )
-    dfD = fabric.list_datasets(workspace=workspace_id)
+    dfD = fabric.list_datasets(workspace=workspace_id, mode="rest")
     dfD_filt = dfD[dfD["Dataset Name"] == dataset]

     if len(dfD_filt) > 0 and not overwrite:
@@ -225,7 +225,10 @@ def get_direct_lake_source(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     sql_endpoint_id = get_direct_lake_sql_endpoint(dataset=dataset, workspace=workspace)
     dfI = fabric.list_items(workspace=workspace)
-    dfI_filt = dfI[(dfI["Id"] == sql_endpoint_id) & (dfI["Type"] == "SQLEndpoint")]
+    dfI_filt = dfI[
+        (dfI["Id"] == sql_endpoint_id)
+        & (dfI["Type"].isin(["SQLEndpoint", "Warehouse"]))
+    ]

     artifact_type, artifact_name, artifact_id = None, None, None
sempy_labs/graph/_users.py CHANGED
@@ -137,7 +137,6 @@ def send_mail(
     cc_recipients: Optional[str | List[str]] = None,
     bcc_recipients: Optional[str | List[str]] = None,
     priority: Literal["Normal", "High", "Low"] = "Normal",
-    follow_up_flag: bool = False,
     attachments: Optional[str | List[str]] = None,
 ):
     """
@@ -165,8 +164,6 @@
         The email address of the BCC recipients.
     priority : Literal["Normal", "High", "Low"], default="Normal"
         The email priority.
-    follow_up_flag : bool, default=False
-        Whether to set a follow-up flag for the email.
     attachments : str | List[str], default=None
         The abfss path or a list of the abfss paths of the attachments to include in the email.
     """
@@ -220,8 +217,8 @@
     if bcc_email_addresses:
         payload["message"]["bccRecipients"] = bcc_email_addresses

-    if follow_up_flag:
-        payload["message"]["flag"] = {"flagStatus": "flagged"}
+    # if follow_up_flag:
+    #     payload["message"]["flag"] = {"flagStatus": "flagged"}

     content_types = {
         ".txt": "text/plain",
@@ -244,6 +241,7 @@
         ".pbip": "application/vnd.ms-powerbi.report",
         ".pbit": "application/vnd.ms-powerbi.report",
         ".vpax": "application/zip",
+        ".geojson": "application/geo+json",
     }

     def file_path_to_content_bytes(file_path):
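
The new .geojson entry feeds the same extension-to-MIME lookup that file_path_to_content_bytes uses when building attachments. A minimal sketch of that lookup; the octet-stream fallback is an assumption for illustration, not taken from the hunk:

    import os

    # Map an attachment's file extension to a MIME type; unknown extensions
    # fall back to a generic binary type (assumed default in this sketch).
    content_types = {".txt": "text/plain", ".geojson": "application/geo+json"}
    ext = os.path.splitext("Files/regions.geojson")[1].lower()
    mime = content_types.get(ext, "application/octet-stream")
    print(mime)  # -> application/geo+json
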
sempy_labs/lakehouse/__init__.py CHANGED
@@ -30,6 +30,9 @@ from ._helper import (
     update_lakehouse,
     load_table,
 )
+from ._materialized_lake_views import (
+    refresh_materialized_lake_views,
+)

 __all__ = [
     "get_lakehouse_columns",
@@ -50,4 +53,5 @@ __all__ = [
     "delete_lakehouse",
     "update_lakehouse",
     "load_table",
+    "refresh_materialized_lake_views",
 ]
sempy_labs/lakehouse/_helper.py CHANGED
@@ -1,7 +1,7 @@
 from uuid import UUID
 from typing import Optional, Literal
 import pyarrow.dataset as ds
-from .._helper_functions import (
+from sempy_labs._helper_functions import (
     _mount,
     delete_item,
     _base_api,
@@ -68,14 +68,23 @@ def is_v_ordered(
     latest_file = os.path.join(delta_log_path, json_files[0])

     with open(latest_file, "r") as f:
-        for line in f:
-            try:
-                data = json.loads(line)
-                if "commitInfo" in data:
-                    tags = data["commitInfo"].get("tags", {})
-                    return tags.get("VORDER", "false").lower() == "true"
-            except json.JSONDecodeError:
-                continue  # Skip malformed lines
+        all_data = [
+            json.loads(line) for line in f if line.strip()
+        ]  # one dict per line
+        for data in all_data:
+            if "metaData" in data:
+                return (
+                    data.get("metaData", {})
+                    .get("configuration", {})
+                    .get("delta.parquet.vorder.enabled", "false")
+                    == "true"
+                )
+
+        # If no metaData, fall back to commitInfo
+        for data in all_data:
+            if "commitInfo" in data:
+                tags = data["commitInfo"].get("tags", {})
+                return tags.get("VORDER", "false").lower() == "true"

     return False  # Default if not found
 
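Read as a whole, the rewritten check now prefers the table-level Delta property over the per-commit tag. A standalone sketch of the same two-step logic against a single _delta_log entry file (the path is a placeholder; one JSON action object per line, as in the code above):

    import json

    def v_ordered(log_file: str) -> bool:
        # Each non-blank line of a Delta log file is one JSON action object.
        with open(log_file, "r") as f:
            entries = [json.loads(line) for line in f if line.strip()]
        # Step 1: table metadata wins if present.
        for entry in entries:
            if "metaData" in entry:
                config = entry["metaData"].get("configuration", {})
                return config.get("delta.parquet.vorder.enabled", "false") == "true"
        # Step 2: otherwise fall back to the commit-level VORDER tag.
        for entry in entries:
            if "commitInfo" in entry:
                tags = entry["commitInfo"].get("tags", {})
                return tags.get("VORDER", "false").lower() == "true"
        return False

    print(v_ordered("Tables/DimDate/_delta_log/00000000000000000010.json"))
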
sempy_labs/lakehouse/_lakehouse.py CHANGED
@@ -2,7 +2,7 @@ from tqdm.auto import tqdm
 from typing import List, Optional, Union
 from sempy._utils._log import log
 from uuid import UUID
-from .._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     resolve_lakehouse_name_and_id,
     resolve_workspace_name_and_id,
@@ -13,7 +13,7 @@ import sempy_labs._icons as icons
 import re
 import time
 import pandas as pd
-from .._job_scheduler import (
+from sempy_labs._job_scheduler import (
     _get_item_job_instance,
 )

@@ -100,11 +100,15 @@ def optimize_lakehouse_tables(
         tables = [tables]

     df_tables = df_delta[df_delta["Table Name"].isin(tables)] if tables else df_delta
+    df_tables.reset_index(drop=True, inplace=True)

-    for _, r in (bar := tqdm(df_tables.iterrows())):
+    total = len(df_tables)
+    for idx, r in (bar := tqdm(df_tables.iterrows(), total=total, bar_format="{desc}")):
         table_name = r["Table Name"]
         path = r["Location"]
-        bar.set_description(f"Optimizing the '{table_name}' table...")
+        bar.set_description(
+            f"Optimizing the '{table_name}' table ({idx + 1}/{total})..."
+        )
         _optimize_table(path=path)


@@ -145,11 +149,13 @@ def vacuum_lakehouse_tables(
         tables = [tables]

     df_tables = df_delta[df_delta["Table Name"].isin(tables)] if tables else df_delta
+    df_tables.reset_index(drop=True, inplace=True)

-    for _, r in (bar := tqdm(df_tables.iterrows())):
+    total = len(df_tables)
+    for idx, r in (bar := tqdm(df_tables.iterrows(), total=total, bar_format="{desc}")):
         table_name = r["Table Name"]
         path = r["Location"]
-        bar.set_description(f"Vacuuming the '{table_name}' table...")
+        bar.set_description(f"Vacuuming the '{table_name}' table ({idx}/{total})...")
         _vacuum_table(path=path, retain_n_hours=retain_n_hours)


@@ -231,7 +237,7 @@ def run_table_maintenance(
     if optimize:
         payload["executionData"]["optimizeSettings"] = {}
     if v_order:
-        payload["executionData"]["optimizeSettings"] = {"vorder": True}
+        payload["executionData"]["optimizeSettings"] = {"vOrder": True}
     if vacuum:
         payload["executionData"]["vacuumSettings"] = {}
     if vacuum and retention_period is not None:
@@ -242,16 +248,19 @@
         method="post",
         payload=payload,
         status_codes=202,
+        client="fabric_sp",
     )

-    f"{icons.in_progress} The table maintenance job for the '{table_name}' table in the '{lakehouse_name}' lakehouse within the '{workspace_name}' workspace has been initiated."
+    print(
+        f"{icons.in_progress} The table maintenance job for the '{table_name}' table in the '{lakehouse_name}' lakehouse within the '{workspace_name}' workspace has been initiated."
+    )

     status_url = response.headers.get("Location").split("fabric.microsoft.com")[1]
     status = None
     while status not in ["Completed", "Failed"]:
         response = _base_api(request=status_url)
         status = response.json().get("status")
-        time.sleep(10)
+        time.sleep(3)

     df = _get_item_job_instance(url=status_url)
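
Taken together, these hunks tighten the maintenance helpers: progress bars with (n/total) counters, the corrected vOrder payload key, service principal support on the job submission, and a 3-second poll interval. A hedged usage sketch; the table names are placeholders, retain_n_hours is inferred from the inner _vacuum_table call, and both functions are assumed to default to the attached lakehouse:

    from sempy_labs.lakehouse import optimize_lakehouse_tables, vacuum_lakehouse_tables

    # Optimize two Delta tables; the bar now reports "(n/total)".
    optimize_lakehouse_tables(tables=["DimDate", "FactSales"])

    # Vacuum the same tables, keeping 7 days (168 hours) of history.
    vacuum_lakehouse_tables(tables=["DimDate", "FactSales"], retain_n_hours=168)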