semantic-link-labs 0.8.10__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic; see the registry's advisory page for more details.

Files changed (81)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
  3. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +34 -3
  5. sempy_labs/_authentication.py +80 -4
  6. sempy_labs/_capacities.py +770 -200
  7. sempy_labs/_capacity_migration.py +7 -37
  8. sempy_labs/_clear_cache.py +37 -35
  9. sempy_labs/_connections.py +13 -13
  10. sempy_labs/_data_pipelines.py +20 -20
  11. sempy_labs/_dataflows.py +27 -28
  12. sempy_labs/_dax.py +41 -47
  13. sempy_labs/_deployment_pipelines.py +1 -1
  14. sempy_labs/_environments.py +26 -23
  15. sempy_labs/_eventhouses.py +16 -15
  16. sempy_labs/_eventstreams.py +16 -15
  17. sempy_labs/_external_data_shares.py +18 -20
  18. sempy_labs/_gateways.py +16 -14
  19. sempy_labs/_generate_semantic_model.py +107 -62
  20. sempy_labs/_git.py +105 -43
  21. sempy_labs/_helper_functions.py +251 -194
  22. sempy_labs/_job_scheduler.py +227 -0
  23. sempy_labs/_kql_databases.py +16 -15
  24. sempy_labs/_kql_querysets.py +16 -15
  25. sempy_labs/_list_functions.py +150 -126
  26. sempy_labs/_managed_private_endpoints.py +19 -17
  27. sempy_labs/_mirrored_databases.py +51 -48
  28. sempy_labs/_mirrored_warehouses.py +5 -4
  29. sempy_labs/_ml_experiments.py +16 -15
  30. sempy_labs/_ml_models.py +15 -14
  31. sempy_labs/_model_bpa.py +210 -207
  32. sempy_labs/_model_bpa_bulk.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +3 -3
  34. sempy_labs/_model_dependencies.py +55 -29
  35. sempy_labs/_notebooks.py +29 -25
  36. sempy_labs/_one_lake_integration.py +23 -26
  37. sempy_labs/_query_scale_out.py +75 -64
  38. sempy_labs/_refresh_semantic_model.py +25 -26
  39. sempy_labs/_spark.py +33 -32
  40. sempy_labs/_sql.py +19 -12
  41. sempy_labs/_translations.py +10 -7
  42. sempy_labs/_vertipaq.py +38 -33
  43. sempy_labs/_warehouses.py +26 -25
  44. sempy_labs/_workspace_identity.py +11 -10
  45. sempy_labs/_workspaces.py +40 -33
  46. sempy_labs/admin/_basic_functions.py +166 -115
  47. sempy_labs/admin/_domains.py +7 -2
  48. sempy_labs/admin/_external_data_share.py +3 -3
  49. sempy_labs/admin/_git.py +4 -1
  50. sempy_labs/admin/_items.py +11 -6
  51. sempy_labs/admin/_scanner.py +10 -5
  52. sempy_labs/directlake/_directlake_schema_compare.py +25 -16
  53. sempy_labs/directlake/_directlake_schema_sync.py +24 -12
  54. sempy_labs/directlake/_dl_helper.py +74 -55
  55. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  56. sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
  57. sempy_labs/directlake/_get_shared_expression.py +4 -3
  58. sempy_labs/directlake/_guardrails.py +12 -6
  59. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  60. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  61. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  62. sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
  63. sempy_labs/directlake/_warm_cache.py +87 -65
  64. sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
  65. sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
  66. sempy_labs/lakehouse/_lakehouse.py +7 -20
  67. sempy_labs/lakehouse/_shortcuts.py +42 -23
  68. sempy_labs/migration/_create_pqt_file.py +16 -11
  69. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  70. sempy_labs/report/_download_report.py +9 -8
  71. sempy_labs/report/_generate_report.py +85 -44
  72. sempy_labs/report/_paginated.py +9 -9
  73. sempy_labs/report/_report_bpa.py +15 -11
  74. sempy_labs/report/_report_functions.py +80 -91
  75. sempy_labs/report/_report_helper.py +8 -4
  76. sempy_labs/report/_report_list_functions.py +24 -13
  77. sempy_labs/report/_report_rebind.py +17 -16
  78. sempy_labs/report/_reportwrapper.py +41 -33
  79. sempy_labs/tom/_model.py +139 -21
  80. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
  81. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0
@@ -8,9 +8,10 @@ from sempy_labs._helper_functions import (
8
8
  pagination,
9
9
  )
10
10
  from sempy.fabric.exceptions import FabricHTTPException
11
+ from uuid import UUID
11
12
 
12
13
 
13
- def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
14
+ def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
14
15
  """
15
16
  Shows the eventstreams within a workspace.
16
17
 
@@ -18,8 +19,8 @@ def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
18
19
 
19
20
  Parameters
20
21
  ----------
21
- workspace : str, default=None
22
- The Fabric workspace name.
22
+ workspace : str | uuid.UUID, default=None
23
+ The Fabric workspace name or ID.
23
24
  Defaults to None which resolves to the workspace of the attached lakehouse
24
25
  or if no lakehouse attached, resolves to the workspace of the notebook.
25
26
 
@@ -31,7 +32,7 @@ def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
31
32
 
32
33
  df = pd.DataFrame(columns=["Eventstream Name", "Eventstream Id", "Description"])
33
34
 
34
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
35
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
35
36
 
36
37
  client = fabric.FabricRestClient()
37
38
  response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams")
@@ -53,7 +54,7 @@ def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
53
54
 
54
55
 
55
56
  def create_eventstream(
56
- name: str, description: Optional[str] = None, workspace: Optional[str] = None
57
+ name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
57
58
  ):
58
59
  """
59
60
  Creates a Fabric eventstream.
@@ -66,13 +67,13 @@ def create_eventstream(
66
67
  Name of the eventstream.
67
68
  description : str, default=None
68
69
  A description of the environment.
69
- workspace : str, default=None
70
- The Fabric workspace name.
70
+ workspace : str | uuid.UUID, default=None
71
+ The Fabric workspace name or ID.
71
72
  Defaults to None which resolves to the workspace of the attached lakehouse
72
73
  or if no lakehouse attached, resolves to the workspace of the notebook.
73
74
  """
74
75
 
75
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
76
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
76
77
 
77
78
  request_body = {"displayName": name}
78
79
 
@@ -87,11 +88,11 @@ def create_eventstream(
87
88
  lro(client, response, status_codes=[201, 202])
88
89
 
89
90
  print(
90
- f"{icons.green_dot} The '{name}' eventstream has been created within the '{workspace}' workspace."
91
+ f"{icons.green_dot} The '{name}' eventstream has been created within the '{workspace_name}' workspace."
91
92
  )
92
93
 
93
94
 
94
- def delete_eventstream(name: str, workspace: Optional[str] = None):
95
+ def delete_eventstream(name: str, workspace: Optional[str | UUID] = None):
95
96
  """
96
97
  Deletes a Fabric eventstream.
97
98
 
@@ -101,16 +102,16 @@ def delete_eventstream(name: str, workspace: Optional[str] = None):
101
102
  ----------
102
103
  name: str
103
104
  Name of the eventstream.
104
- workspace : str, default=None
105
- The Fabric workspace name.
105
+ workspace : str | uuid.UUID, default=None
106
+ The Fabric workspace name or ID.
106
107
  Defaults to None which resolves to the workspace of the attached lakehouse
107
108
  or if no lakehouse attached, resolves to the workspace of the notebook.
108
109
  """
109
110
 
110
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
111
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
111
112
 
112
113
  item_id = fabric.resolve_item_id(
113
- item_name=name, type="Eventstream", workspace=workspace
114
+ item_name=name, type="Eventstream", workspace=workspace_id
114
115
  )
115
116
 
116
117
  client = fabric.FabricRestClient()
@@ -120,5 +121,5 @@ def delete_eventstream(name: str, workspace: Optional[str] = None):
120
121
  raise FabricHTTPException(response)
121
122
 
122
123
  print(
123
- f"{icons.green_dot} The '{name}' eventstream within the '{workspace}' workspace has been deleted."
124
+ f"{icons.green_dot} The '{name}' eventstream within the '{workspace_name}' workspace has been deleted."
124
125
  )
@@ -15,7 +15,7 @@ def create_external_data_share(
15
15
  item_type: str,
16
16
  paths: str | List[str],
17
17
  recipient: str,
18
- workspace: Optional[str] = None,
18
+ workspace: Optional[str | UUID] = None,
19
19
  ):
20
20
  """
21
21
  Creates an external data share for a given path or list of paths in the specified item.
@@ -32,17 +32,15 @@ def create_external_data_share(
32
32
  The path or list of paths that are to be externally shared. Currently, only a single path is supported.
33
33
  recipient : str
34
34
  The email address of the recipient.
35
- workspace : str, default=None
36
- The Fabric workspace name.
35
+ workspace : str | uuid.UUID, default=None
36
+ The Fabric workspace name or ID.
37
37
  Defaults to None which resolves to the workspace of the attached lakehouse
38
38
  or if no lakehouse attached, resolves to the workspace of the notebook.
39
39
  """
40
40
 
41
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/external-data-shares/create-external-data-share?tabs=HTTP
42
-
43
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
41
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
44
42
  item_id = fabric.resolve_item_id(
45
- item_name=item_name, type=item_type, workspace=workspace
43
+ item_name=item_name, type=item_type, workspace=workspace_id
46
44
  )
47
45
 
48
46
  if isinstance(paths, str):
@@ -60,7 +58,7 @@ def create_external_data_share(
60
58
  raise FabricHTTPException(response)
61
59
 
62
60
  print(
63
- f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{workspace}' workspace for the {paths} paths."
61
+ f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{workspace_name}' workspace for the {paths} paths."
64
62
  )
65
63
 
66
64
 
@@ -68,7 +66,7 @@ def revoke_external_data_share(
68
66
  external_data_share_id: UUID,
69
67
  item_name: str,
70
68
  item_type: str,
71
- workspace: Optional[str] = None,
69
+ workspace: Optional[str | UUID] = None,
72
70
  ):
73
71
  """
74
72
  Revokes the specified external data share. Note: This action cannot be undone.
@@ -77,21 +75,21 @@ def revoke_external_data_share(
77
75
 
78
76
  Parameters
79
77
  ----------
80
- external_data_share_id : UUID
78
+ external_data_share_id : uuid.UUID
81
79
  The external data share ID.
82
80
  item_name : str
83
81
  The item name.
84
82
  item_type : str
85
83
  The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
86
- workspace : str, default=None
87
- The Fabric workspace name.
84
+ workspace : str | uuid.UUID, default=None
85
+ The Fabric workspace name or ID.
88
86
  Defaults to None which resolves to the workspace of the attached lakehouse
89
87
  or if no lakehouse attached, resolves to the workspace of the notebook.
90
88
  """
91
89
 
92
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
90
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
93
91
  item_id = fabric.resolve_item_id(
94
- item_name=item_name, type=item_type, workspace=workspace
92
+ item_name=item_name, type=item_type, workspace=workspace_id
95
93
  )
96
94
 
97
95
  client = fabric.FabricRestClient()
@@ -103,12 +101,12 @@ def revoke_external_data_share(
103
101
  raise FabricHTTPException(response)
104
102
 
105
103
  print(
106
- f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace}' workspace has been revoked."
104
+ f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
107
105
  )
108
106
 
109
107
 
110
108
  def list_external_data_shares_in_item(
111
- item_name: str, item_type: str, workspace: Optional[str] = None
109
+ item_name: str, item_type: str, workspace: Optional[str | UUID] = None
112
110
  ) -> pd.DataFrame:
113
111
  """
114
112
  Returns a list of the external data shares that exist for the specified item.
@@ -121,8 +119,8 @@ def list_external_data_shares_in_item(
121
119
  The item name.
122
120
  item_type : str
123
121
  The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
124
- workspace : str, default=None
125
- The Fabric workspace name.
122
+ workspace : str | uuid.UUID, default=None
123
+ The Fabric workspace name or ID.
126
124
  Defaults to None which resolves to the workspace of the attached lakehouse
127
125
  or if no lakehouse attached, resolves to the workspace of the notebook.
128
126
 
@@ -132,9 +130,9 @@ def list_external_data_shares_in_item(
132
130
  A pandas dataframe showing a list of the external data shares that exist for the specified item.
133
131
  """
134
132
 
135
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
133
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
136
134
  item_id = fabric.resolve_item_id(
137
- item_name=item_name, type=item_type, workspace=workspace
135
+ item_name=item_name, type=item_type, workspace=workspace_id
138
136
  )
139
137
 
140
138
  client = fabric.FabricRestClient()
sempy_labs/_gateways.py CHANGED
@@ -1,4 +1,5 @@
1
1
  import sempy.fabric as fabric
2
+ from sempy._utils._log import log
2
3
  import pandas as pd
3
4
  from typing import Optional
4
5
  from sempy.fabric.exceptions import FabricHTTPException
@@ -13,6 +14,7 @@ from uuid import UUID
13
14
  import sempy_labs._icons as icons
14
15
 
15
16
 
17
+ @log
16
18
  def list_gateways() -> pd.DataFrame:
17
19
  """
18
20
  Returns a list of all gateways the user has permission for, including on-premises, on-premises (personal mode), and virtual network gateways.
@@ -95,7 +97,7 @@ def delete_gateway(gateway: str | UUID):
95
97
 
96
98
  Parameters
97
99
  ----------
98
- gateway : str | UUID
100
+ gateway : str | uuid.UUID
99
101
  The gateway name or ID.
100
102
  """
101
103
 
@@ -117,7 +119,7 @@ def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:
117
119
 
118
120
  Parameters
119
121
  ----------
120
- gateway : str | UUID
122
+ gateway : str | uuid.UUID
121
123
  The gateway name or ID.
122
124
 
123
125
  Returns
@@ -159,9 +161,9 @@ def delete_gateway_role_assignment(gateway: str | UUID, role_assignement_id: UUI
159
161
 
160
162
  Parameters
161
163
  ----------
162
- gateway : str | UUID
164
+ gateway : str | uuid.UUID
163
165
  The gateway name or ID.
164
- role_assignement_id : UUID
166
+ role_assignement_id : uuid.UUID
165
167
  The role assignment ID.
166
168
  """
167
169
 
@@ -204,9 +206,9 @@ def delete_gateway_member(gateway: str | UUID, gateway_member: str | UUID):
204
206
 
205
207
  Parameters
206
208
  ----------
207
- gateway : str | UUID
209
+ gateway : str | uuid.UUID
208
210
  The gateway name or ID.
209
- gateway_member : str | UUID
211
+ gateway_member : str | uuid.UUID
210
212
  The gateway member name or ID.
211
213
  """
212
214
 
@@ -234,7 +236,7 @@ def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:
234
236
 
235
237
  Parameters
236
238
  ----------
237
- gateway : str | UUID
239
+ gateway : str | uuid.UUID
238
240
  The gateway name or ID.
239
241
 
240
242
  Returns
@@ -298,7 +300,7 @@ def create_vnet_gateway(
298
300
  ----------
299
301
  name : str
300
302
  The gateway name.
301
- capacity : str | UUID
303
+ capacity : str | uuid.UUID
302
304
  The capacity name or Id.
303
305
  inactivity_minutes_before_sleep : int
304
306
  The minutes of inactivity before the virtual network gateway goes into auto-sleep. Must be one of the following values: 30, 60, 90, 120, 150, 240, 360, 480, 720, 1440.
@@ -353,7 +355,7 @@ def update_on_premises_gateway(
353
355
 
354
356
  Parameters
355
357
  ----------
356
- gateway : str | UUID
358
+ gateway : str | uuid.UUID
357
359
  The gateway name or ID.
358
360
  allow_cloud_connection_refresh : bool, default=None
359
361
  Whether to allow cloud connections to refresh through this on-premises gateway. True - Allow, False - Do not allow.
@@ -403,9 +405,9 @@ def update_vnet_gateway(
403
405
 
404
406
  Parameters
405
407
  ----------
406
- gateway : str | UUID
408
+ gateway : str | uuid.UUID
407
409
  The gateway name or ID.
408
- capacity: str | UUID
410
+ capacity: str | uuid.UUID
409
411
  The capacity name or ID.
410
412
  inactivity_minutes_before_sleep : int, default=None
411
413
  The minutes of inactivity before the virtual network gateway goes into auto-sleep. Must be one of the following values: 30, 60, 90, 120, 150, 240, 360, 480, 720, 1440.
@@ -451,11 +453,11 @@ def bind_semantic_model_to_gateway(
451
453
 
452
454
  Parameters
453
455
  ----------
454
- dataset : str | UUID
456
+ dataset : str | uuid.UUID
455
457
  The name or ID of the semantic model.
456
- gateway : str | UUID
458
+ gateway : str | uuid.UUID
457
459
  The name or ID of the gateway.
458
- workspace : str | UUID, default=None
460
+ workspace : str | uuid.UUID, default=None
459
461
  The Fabric workspace name.
460
462
  Defaults to None which resolves to the workspace of the attached lakehouse
461
463
  or if no lakehouse attached, resolves to the workspace of the notebook.