semantic-link-labs 0.11.1__py3-none-any.whl → 0.11.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic.

Files changed (137)
  1. {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/METADATA +7 -6
  2. semantic_link_labs-0.11.3.dist-info/RECORD +212 -0
  3. sempy_labs/__init__.py +65 -71
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_ai.py +1 -1
  6. sempy_labs/_capacities.py +2 -2
  7. sempy_labs/_capacity_migration.py +5 -5
  8. sempy_labs/_clear_cache.py +1 -1
  9. sempy_labs/_connections.py +2 -2
  10. sempy_labs/_dashboards.py +16 -16
  11. sempy_labs/_data_pipelines.py +1 -1
  12. sempy_labs/_dataflows.py +101 -26
  13. sempy_labs/_dax.py +3 -3
  14. sempy_labs/_delta_analyzer.py +4 -4
  15. sempy_labs/_delta_analyzer_history.py +1 -1
  16. sempy_labs/_deployment_pipelines.py +1 -1
  17. sempy_labs/_environments.py +22 -21
  18. sempy_labs/_eventhouses.py +12 -11
  19. sempy_labs/_eventstreams.py +12 -11
  20. sempy_labs/_external_data_shares.py +78 -23
  21. sempy_labs/_gateways.py +47 -45
  22. sempy_labs/_generate_semantic_model.py +3 -3
  23. sempy_labs/_git.py +1 -1
  24. sempy_labs/_graphQL.py +12 -11
  25. sempy_labs/_helper_functions.py +169 -5
  26. sempy_labs/_job_scheduler.py +56 -54
  27. sempy_labs/_kql_databases.py +16 -17
  28. sempy_labs/_kql_querysets.py +12 -11
  29. sempy_labs/_kusto.py +2 -2
  30. sempy_labs/_labels.py +126 -0
  31. sempy_labs/_list_functions.py +2 -2
  32. sempy_labs/_managed_private_endpoints.py +18 -15
  33. sempy_labs/_mirrored_databases.py +16 -15
  34. sempy_labs/_mirrored_warehouses.py +12 -11
  35. sempy_labs/_ml_experiments.py +11 -10
  36. sempy_labs/_model_auto_build.py +3 -3
  37. sempy_labs/_model_bpa.py +5 -5
  38. sempy_labs/_model_bpa_bulk.py +3 -3
  39. sempy_labs/_model_dependencies.py +1 -1
  40. sempy_labs/_mounted_data_factories.py +12 -12
  41. sempy_labs/_notebooks.py +151 -2
  42. sempy_labs/_one_lake_integration.py +1 -1
  43. sempy_labs/_query_scale_out.py +1 -1
  44. sempy_labs/_refresh_semantic_model.py +1 -1
  45. sempy_labs/_semantic_models.py +30 -28
  46. sempy_labs/_spark.py +1 -1
  47. sempy_labs/_sql.py +1 -1
  48. sempy_labs/_sql_endpoints.py +12 -11
  49. sempy_labs/_sqldatabase.py +15 -15
  50. sempy_labs/_tags.py +11 -10
  51. sempy_labs/_translations.py +1 -1
  52. sempy_labs/_user_delegation_key.py +2 -2
  53. sempy_labs/_vertipaq.py +3 -3
  54. sempy_labs/_vpax.py +1 -1
  55. sempy_labs/_warehouses.py +15 -14
  56. sempy_labs/_workloads.py +1 -1
  57. sempy_labs/_workspace_identity.py +1 -1
  58. sempy_labs/_workspaces.py +14 -13
  59. sempy_labs/admin/__init__.py +18 -18
  60. sempy_labs/admin/_activities.py +46 -46
  61. sempy_labs/admin/_apps.py +28 -26
  62. sempy_labs/admin/_artifacts.py +15 -15
  63. sempy_labs/admin/_basic_functions.py +1 -2
  64. sempy_labs/admin/_capacities.py +84 -82
  65. sempy_labs/admin/_dataflows.py +2 -2
  66. sempy_labs/admin/_datasets.py +50 -48
  67. sempy_labs/admin/_domains.py +25 -19
  68. sempy_labs/admin/_external_data_share.py +24 -22
  69. sempy_labs/admin/_git.py +17 -17
  70. sempy_labs/admin/_items.py +47 -45
  71. sempy_labs/admin/_reports.py +61 -58
  72. sempy_labs/admin/_scanner.py +2 -2
  73. sempy_labs/admin/_shared.py +18 -18
  74. sempy_labs/admin/_tags.py +2 -2
  75. sempy_labs/admin/_tenant.py +57 -51
  76. sempy_labs/admin/_users.py +16 -15
  77. sempy_labs/admin/_workspaces.py +2 -2
  78. sempy_labs/directlake/__init__.py +12 -12
  79. sempy_labs/directlake/_directlake_schema_compare.py +3 -3
  80. sempy_labs/directlake/_directlake_schema_sync.py +9 -7
  81. sempy_labs/directlake/_dl_helper.py +5 -2
  82. sempy_labs/directlake/_generate_shared_expression.py +1 -1
  83. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  84. sempy_labs/directlake/_guardrails.py +1 -1
  85. sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
  86. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
  87. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
  88. sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
  89. sempy_labs/directlake/_warm_cache.py +3 -3
  90. sempy_labs/graph/__init__.py +3 -3
  91. sempy_labs/graph/_groups.py +81 -78
  92. sempy_labs/graph/_teams.py +21 -21
  93. sempy_labs/graph/_users.py +109 -10
  94. sempy_labs/lakehouse/__init__.py +7 -7
  95. sempy_labs/lakehouse/_blobs.py +30 -30
  96. sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
  97. sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
  98. sempy_labs/lakehouse/_helper.py +38 -1
  99. sempy_labs/lakehouse/_lakehouse.py +16 -7
  100. sempy_labs/lakehouse/_livy_sessions.py +47 -42
  101. sempy_labs/lakehouse/_shortcuts.py +22 -21
  102. sempy_labs/migration/__init__.py +8 -8
  103. sempy_labs/migration/_create_pqt_file.py +2 -2
  104. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +35 -44
  105. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +9 -20
  106. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +5 -9
  107. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +11 -20
  108. sempy_labs/migration/_migration_validation.py +1 -2
  109. sempy_labs/migration/_refresh_calc_tables.py +2 -2
  110. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
  111. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
  112. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
  113. sempy_labs/ml_model/__init__.py +23 -0
  114. sempy_labs/ml_model/_functions.py +427 -0
  115. sempy_labs/report/__init__.py +10 -10
  116. sempy_labs/report/_download_report.py +2 -2
  117. sempy_labs/report/_export_report.py +2 -2
  118. sempy_labs/report/_generate_report.py +1 -1
  119. sempy_labs/report/_paginated.py +1 -1
  120. sempy_labs/report/_report_bpa.py +4 -3
  121. sempy_labs/report/_report_functions.py +3 -3
  122. sempy_labs/report/_report_list_functions.py +3 -3
  123. sempy_labs/report/_report_rebind.py +1 -1
  124. sempy_labs/report/_reportwrapper.py +248 -250
  125. sempy_labs/report/_save_report.py +3 -3
  126. sempy_labs/theme/_org_themes.py +19 -6
  127. sempy_labs/tom/__init__.py +1 -1
  128. sempy_labs/tom/_model.py +13 -8
  129. sempy_labs/variable_library/__init__.py +19 -0
  130. sempy_labs/variable_library/_functions.py +403 -0
  131. semantic_link_labs-0.11.1.dist-info/RECORD +0 -210
  132. sempy_labs/_dax_query_view.py +0 -57
  133. sempy_labs/_ml_models.py +0 -110
  134. sempy_labs/_variable_libraries.py +0 -91
  135. {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/WHEEL +0 -0
  136. {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/licenses/LICENSE +0 -0
  137. {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/top_level.txt +0 -0
sempy_labs/_external_data_shares.py CHANGED
@@ -2,11 +2,13 @@ from uuid import UUID
 import pandas as pd
 from typing import Optional, List
 import sempy_labs._icons as icons
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
     resolve_item_id,
+    resolve_item_name_and_id,
+    resolve_workspace_id,
 )
 from sempy._utils._log import log
 
@@ -24,6 +26,8 @@ def create_external_data_share(
 
     This is a wrapper function for the following API: `External Data Shares - Create External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/create-external-data-share>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item_name : str
@@ -53,6 +57,7 @@ def create_external_data_share(
         method="post",
         status_codes=201,
         payload=payload,
+        client="fabric_sp",
     )
     print(
         f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{workspace_name}' workspace for the {paths} paths."
@@ -71,6 +76,8 @@ def revoke_external_data_share(
 
     This is a wrapper function for the following API: `External Data Shares - Revoke External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/revoke-external-data-share>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     external_data_share_id : uuid.UUID
@@ -91,6 +98,7 @@ def revoke_external_data_share(
     _base_api(
         request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke",
         method="post",
+        client="fabric_sp",
     )
     print(
         f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
@@ -106,6 +114,8 @@ def list_external_data_shares_in_item(
 
     This is a wrapper function for the following API: `External Data Shares - List External Data Shares In Item <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/list-external-data-shares-in-item>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item_name : str
@@ -123,7 +133,7 @@ def list_external_data_shares_in_item(
         A pandas dataframe showing a list of the external data shares that exist for the specified item.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace_id = resolve_workspace_id(workspace)
     item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
 
     columns = {
@@ -145,31 +155,76 @@ def list_external_data_shares_in_item(
     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
         uses_pagination=True,
+        client="fabric_sp",
     )
 
-    dfs = []
+    rows = []
     for r in responses:
         for i in r.get("value", []):
             item_id = i.get("itemId")
-            new_data = {
-                "External Data Share Id": i.get("id"),
-                "Paths": [i.get("paths")],
-                "Creator Principal Id": i.get("creatorPrincipal", {}).get("id"),
-                "Creator Principal Type": i.get("creatorPrincipal", {}).get("type"),
-                "Recipient User Principal Name": i.get("recipient", {}).get(
-                    "userPrincipalName"
-                ),
-                "Status": i.get("status"),
-                "Expiration Time UTC": i.get("expriationTimeUtc"),
-                "Workspace Id": i.get("workspaceId"),
-                "Item Id": item_id,
-                "Item Name": item_name,
-                "Item Type": item_type,
-                "Invitation URL": i.get("invitationUrl"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "External Data Share Id": i.get("id"),
+                    "Paths": [i.get("paths")],
+                    "Creator Principal Id": i.get("creatorPrincipal", {}).get("id"),
+                    "Creator Principal Type": i.get("creatorPrincipal", {}).get("type"),
+                    "Recipient User Principal Name": i.get("recipient", {}).get(
+                        "userPrincipalName"
+                    ),
+                    "Status": i.get("status"),
+                    "Expiration Time UTC": i.get("expriationTimeUtc"),
+                    "Workspace Id": i.get("workspaceId"),
+                    "Item Id": item_id,
+                    "Item Name": item_name,
+                    "Item Type": item_type,
+                    "Invitation URL": i.get("invitationUrl"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
+
+
+@log
+def delete_external_data_share(
+    external_data_share_id: UUID,
+    item: str | UUID,
+    item_type: str,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Deletes the specified external data share.
+
+    This is a wrapper function for the following API: `External Data Shares Provider - Delete External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares-provider/delete-external-data-share>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    external_data_share_id : uuid.UUID
+        The external data share ID.
+    item : str | uuid.UUID
+        The item name or ID.
+    item_type : str
+        The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = resolve_item_name_and_id(
+        item=item, type=item_type, workspace=workspace_id
+    )
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}",
+        method="delete",
+        client="fabric_sp",
+    )
+    print(
+        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
+    )
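For orientation, a hedged usage sketch of the module after these changes. The lakehouse name, item type, and workspace below are illustrative placeholders, and the functions are imported from the module shown in the diff in case the new delete_external_data_share is not re-exported from the package root.

# Hypothetical example: enumerate the external data shares on a lakehouse named
# "Sales" and delete the first one. All names and IDs are placeholders.
from sempy_labs._external_data_shares import (
    list_external_data_shares_in_item,
    delete_external_data_share,
)

shares = list_external_data_shares_in_item(
    item_name="Sales", item_type="Lakehouse", workspace="My Workspace"
)
if not shares.empty:
    delete_external_data_share(
        external_data_share_id=shares["External Data Share Id"].iloc[0],
        item="Sales",
        item_type="Lakehouse",
        workspace="My Workspace",
    )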
sempy_labs/_gateways.py CHANGED
@@ -1,7 +1,7 @@
 from sempy._utils._log import log
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _is_valid_uuid,
     resolve_capacity_id,
     resolve_workspace_name_and_id,
@@ -47,26 +47,28 @@ def list_gateways() -> pd.DataFrame:
         request="/v1/gateways", client="fabric_sp", uses_pagination=True
     )
 
-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            new_data = {
-                "Gateway Name": v.get("displayName"),
-                "Gateway Id": v.get("id"),
-                "Type": v.get("type"),
-                "Public Key Exponent": v.get("publicKey", {}).get("exponent"),
-                "Public Key Modulus": v.get("publicKey", {}).get("modulus"),
-                "Version": v.get("version"),
-                "Number Of Member Gateways": v.get("numberOfMemberGateways", 0),
-                "Load Balancing Setting": v.get("loadBalancingSetting"),
-                "Allow Cloud Connection Refresh": v.get("allowCloudConnectionRefresh"),
-                "Allow Custom Connectors": v.get("allowCustomConnectors"),
-            }
-
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "Gateway Name": v.get("displayName"),
+                    "Gateway Id": v.get("id"),
+                    "Type": v.get("type"),
+                    "Public Key Exponent": v.get("publicKey", {}).get("exponent"),
+                    "Public Key Modulus": v.get("publicKey", {}).get("modulus"),
+                    "Version": v.get("version"),
+                    "Number Of Member Gateways": v.get("numberOfMemberGateways", 0),
+                    "Load Balancing Setting": v.get("loadBalancingSetting"),
+                    "Allow Cloud Connection Refresh": v.get(
+                        "allowCloudConnectionRefresh"
+                    ),
+                    "Allow Custom Connectors": v.get("allowCustomConnectors"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
@@ -141,20 +143,20 @@ def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:
         uses_pagination=True,
     )
 
-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            new_data = {
-                "Gateway Role Assignment Id": v.get("id"),
-                "Principal Id": v.get("principal", {}).get("id"),
-                "Principal Type": v.get("principal", {}).get("type"),
-                "Role": v.get("role"),
-            }
-
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "Gateway Role Assignment Id": v.get("id"),
+                    "Principal Id": v.get("principal", {}).get("id"),
+                    "Principal Type": v.get("principal", {}).get("type"),
+                    "Role": v.get("role"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
 
@@ -274,21 +276,21 @@ def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:
         request=f"/v1/gateways/{gateway_id}/members", client="fabric_sp"
     )
 
-    dfs = []
+    rows = []
    for v in response.json().get("value", []):
-        new_data = {
-            "Member Id": v.get("id"),
-            "Member Name": v.get("displayName"),
-            "Public Key Exponent": v.get("publicKey", {}).get("exponent"),
-            "Public Key Modulus": v.get("publicKey", {}).get("modulus"),
-            "Version": v.get("version"),
-            "Enabled": v.get("enabled"),
-        }
-
-        dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+        rows.append(
+            {
+                "Member Id": v.get("id"),
+                "Member Name": v.get("displayName"),
+                "Public Key Exponent": v.get("publicKey", {}).get("exponent"),
+                "Public Key Modulus": v.get("publicKey", {}).get("modulus"),
+                "Version": v.get("version"),
+                "Enabled": v.get("enabled"),
+            }
+        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
@@ -4,7 +4,7 @@ import json
 import os
 from typing import Optional, List
 from sempy._utils._log import log
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     _conv_b64,
@@ -13,9 +13,9 @@ from sempy_labs._helper_functions import (
     _mount,
     resolve_workspace_id,
 )
-from sempy_labs.lakehouse._lakehouse import lakehouse_attached
+from .lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
-from sempy_labs._refresh_semantic_model import refresh_semantic_model
+from ._refresh_semantic_model import refresh_semantic_model
 from uuid import UUID
 
 
sempy_labs/_git.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional, List
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
sempy_labs/_graphQL.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 from uuid import UUID
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _base_api,
     _create_dataframe,
     resolve_workspace_id,
@@ -47,18 +47,19 @@ def list_graphql_apis(workspace: Optional[str | UUID]) -> pd.DataFrame:
         client="fabric_sp",
     )
 
-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            new_data = {
-                "GraphQL API Name": v.get("displayName"),
-                "GraphQL API Id": v.get("id"),
-                "Description": v.get("description"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "GraphQL API Name": v.get("displayName"),
+                    "GraphQL API Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
 
sempy_labs/_helper_functions.py CHANGED
@@ -20,6 +20,7 @@ import sempy_labs._authentication as auth
 from jsonpath_ng.ext import parse
 from jsonpath_ng.jsonpath import Fields, Index
 from sempy._utils._log import log
+from os import PathLike
 
 
 def _build_url(url: str, params: dict) -> str:
@@ -238,6 +239,18 @@ def delete_item(
     )
 
 
+@log
+def create_folder_if_not_exists(
+    folder: str | PathLike, workspace: Optional[str | UUID] = None
+) -> UUID:
+    try:
+        x = fabric.resolve_folder_id(folder=folder, workspace=workspace)
+    except:
+        x = fabric.create_folder(folder=folder, workspace=workspace)
+
+    return x
+
+
 @log
 def create_item(
     name: str,
@@ -245,6 +258,7 @@ def create_item(
     description: Optional[str] = None,
     definition: Optional[dict] = None,
     workspace: Optional[str | UUID] = None,
+    folder: Optional[str | PathLike] = None,
 ):
     """
     Creates an item in a Fabric workspace.
@@ -263,6 +277,9 @@ def create_item(
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    folder : str | os.PathLike, default=None
+        The folder within the workspace where the item will be created.
+        Defaults to None which places the item in the root of the workspace.
     """
     from sempy_labs._utils import item_types
 
@@ -277,6 +294,10 @@ def create_item(
         payload["description"] = description
     if definition:
         payload["definition"] = definition
+    if folder:
+        payload["folderId"] = create_folder_if_not_exists(
+            folder=folder, workspace=workspace_id
+        )
 
     _base_api(
         request=f"/v1/workspaces/{workspace_id}/{item_type_url}",
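A hedged sketch of the folder behaviour added above: create_folder_if_not_exists resolves an existing workspace folder or creates it, and create_item sends the resulting ID as folderId. The item name, type string, workspace, and folder path below are placeholders.

# Hypothetical call; assumes "Notebook" is a valid key in sempy_labs._utils.item_types
# and that the caller runs in a Fabric environment with permission to create items.
from sempy_labs._helper_functions import create_item

create_item(
    name="My Notebook",
    type="Notebook",
    workspace="My Workspace",
    folder="Finance/Models",  # resolved (or created) and sent as payload["folderId"]
)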
@@ -290,6 +311,146 @@ def create_item(
     )
 
 
+@log
+def copy_item(
+    item: str | UUID,
+    type: str,
+    target_name: Optional[str] = None,
+    source_workspace: Optional[str | UUID] = None,
+    target_workspace: Optional[str | UUID] = None,
+    overwrite: bool = False,
+):
+    """
+    Copies an item (with its definition) from one location to another location.
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item to be copied.
+    type : str
+        The type of the item.
+    target_name: str, default=None
+        The name of the item in the target workspace. Defaults to the same name as the source item.
+    source_workspace : str | uuid.UUID, default=None
+        The workspace name or ID in which the item exists.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    target_workspace : str | uuid.UUID, default=None
+        The workspace name or ID to which the item will be copied.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    overwrite: bool, default=False
+        If True, overwrites the item in the target workspace if it already exists.
+    """
+
+    items = {
+        "CopyJob": "copyJobs",
+        "Dataflow": "dataflows",
+        "Eventhouse": "eventhouses",
+        "GraphQLApi": "GraphQLApis",
+        "Report": "reports",
+        "SemanticModel": "semanticModels",
+        # "Environment": "environments",
+        "KQLDatabase": "kqlDatabases",
+        "KQLDashboard": "kqlDashboards",
+        "KQLQueryset": "kqlQuerysets",
+        "DataPipeline": "dataPipelines",
+        "Notebook": "notebooks",
+        "SparkJobDefinition": "sparkJobDefinitions",
+        "Eventstream": "eventstreams",
+        "MirroredWarehouse": "mirroredWarehouses",
+        "MirroredDatabase": "mirroredDatabases",
+        "MountedDataFactory": "mountedDataFactories",
+        "VariableLibrary": "variableLibraries",
+        "ApacheAirFlowJob": "ApacheAirflowJobs",
+        "WarehouseSnapshot": "warehousesnapshots",
+        "DigitalTwinBuilder": "digitaltwinbuilders",
+        "DigitalTwinBuilderFlow": "DigitalTwinBuilderFlows",
+        "MirroredAzureDatabricksCatalog": "mirroredAzureDatabricksCatalogs",
+    }
+    if type not in items:
+        raise ValueError(
+            f"{icons.red_dot} The '{type}' item type does not have a definition and cannot be copied."
+        )
+    type_url = items.get(type)
+
+    (item_name, item_id) = resolve_item_name_and_id(
+        item=item, type=type, workspace=source_workspace
+    )
+    (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
+        source_workspace
+    )
+    (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
+        target_workspace
+    )
+
+    if target_name is None:
+        target_name = item_name
+
+    if source_workspace_id == target_workspace_id and target_name == item_name:
+        raise ValueError(
+            f"{icons.red_dot} The source and target workspaces are the same and the target name is the same as the source name. No action taken."
+        )
+
+    result = _base_api(
+        request=f"v1/workspaces/{source_workspace_id}/{type_url}/{item_id}",
+        client="fabric_sp",
+    )
+    description = result.json().get("description")
+
+    payload = _base_api(
+        request=f"v1/workspaces/{source_workspace_id}/{type_url}/{item_id}/getDefinition",
+        method="post",
+        client="fabric_sp",
+        status_codes=None,
+        lro_return_json=True,
+    )
+    payload["displayName"] = target_name
+    if description:
+        payload["description"] = description
+
+    # Check if item exists in target workspace
+    exists = False
+    try:
+        target_item_id = resolve_item_id(
+            item=target_name, type=type, workspace=target_workspace_id
+        )
+        exists = True
+    except Exception:
+        exists = False
+
+    if exists and not overwrite:
+        raise ValueError(
+            f"{icons.warning} The item '{target_name}' of type '{type}' already exists in the target workspace '{target_workspace_name}' and overwrite is set to False."
+        )
+    elif exists and overwrite:
+        # Update item definition
+        print(
+            f"{icons.in_progress} Updating existing item '{target_name}' of type '{type}' in the target workspace '{target_workspace_name}'..."
+        )
+        _base_api(
+            request=f"/v1/workspaces/{target_workspace_id}/{type_url}/{target_item_id}/updateDefinition",
+            method="post",
+            client="fabric_sp",
+            payload=payload,
+            lro_return_status_code=True,
+            status_codes=None,
+        )
+        print(
+            f"{icons.green_dot} The item '{target_name}' of type '{type}' has been successfully updated in the target workspace '{target_workspace_name}'."
+        )
+    else:
+        print(
+            f"{icons.in_progress} Creating new item '{target_name}' of type '{type}' in the target workspace '{target_workspace_name}'..."
+        )
+        create_item(
+            name=target_name,
+            type=type,
+            definition=payload["definition"],
+            workspace=target_workspace_id,
+        )
+
+
 @log
 def get_item_definition(
     item: str | UUID,
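A hedged usage sketch of the new copy_item helper defined above; the report and workspace names are placeholders, and the import path follows the module shown in this diff.

from sempy_labs._helper_functions import copy_item

# Copy a report's definition from a dev workspace to a prod workspace,
# overwriting an existing report with the same name if one is found.
copy_item(
    item="Sales Report",
    type="Report",
    source_workspace="Dev Workspace",
    target_workspace="Prod Workspace",
    overwrite=True,
)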
@@ -997,15 +1158,15 @@ def _decode_b64(file, format: Optional[str] = "utf-8"):
 
 @log
 def is_default_semantic_model(
-    dataset: str, workspace: Optional[str | UUID] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> bool:
     """
     Identifies whether a semantic model is a default semantic model.
 
     Parameters
     ----------
-    dataset : str
-        The name of the semantic model.
+    dataset : str | uuid.UUID
+        The name or ID of the semantic model.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -1017,7 +1178,10 @@ def is_default_semantic_model(
         A True/False value indicating whether the semantic model is a default semantic model.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace_id = resolve_workspace_id(workspace)
+    (dataset_name, dataset_id) = resolve_item_name_and_id(
+        item=dataset, type="SemanticModel", workspace=workspace_id
+    )
 
     dfI = fabric.list_items(workspace=workspace_id)
     filtered_df = dfI.groupby("Display Name").filter(
@@ -1026,7 +1190,7 @@ def is_default_semantic_model(
     )
     default_semantic_models = filtered_df["Display Name"].unique().tolist()
 
-    return dataset in default_semantic_models
+    return dataset_name in default_semantic_models
 
 
 @log
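Finally, is_default_semantic_model now accepts either the model name or its ID, since the input is resolved through resolve_item_name_and_id before the name comparison. A hedged sketch; the model name and UUID are placeholders, and the function is assumed to remain importable as shown.

from sempy_labs._helper_functions import is_default_semantic_model

# Both calls are now equivalent ways to address the same model.
is_default_semantic_model(dataset="Sales", workspace="My Workspace")
is_default_semantic_model(
    dataset="5f2b7a1c-0000-0000-0000-000000000000", workspace="My Workspace"
)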