semantic-link-labs 0.11.2-py3-none-any.whl → 0.12.0-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/METADATA +7 -6
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/RECORD +90 -84
- sempy_labs/__init__.py +18 -18
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_authentication.py +81 -32
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +4 -4
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +107 -70
- sempy_labs/_dashboards.py +6 -2
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +1 -1
- sempy_labs/_dax.py +3 -3
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +1 -1
- sempy_labs/_eventhouses.py +9 -3
- sempy_labs/_eventstreams.py +1 -1
- sempy_labs/_external_data_shares.py +56 -2
- sempy_labs/_gateways.py +14 -7
- sempy_labs/_generate_semantic_model.py +7 -12
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +1 -1
- sempy_labs/_helper_functions.py +293 -22
- sempy_labs/_job_scheduler.py +12 -1
- sempy_labs/_kql_databases.py +1 -1
- sempy_labs/_kql_querysets.py +10 -2
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_labels.py +126 -0
- sempy_labs/_list_functions.py +2 -2
- sempy_labs/_managed_private_endpoints.py +1 -1
- sempy_labs/_mirrored_databases.py +40 -16
- sempy_labs/_mirrored_warehouses.py +1 -1
- sempy_labs/_ml_experiments.py +1 -1
- sempy_labs/_model_bpa.py +6 -6
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +3 -3
- sempy_labs/_notebooks.py +153 -3
- sempy_labs/_query_scale_out.py +2 -2
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +15 -3
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +3 -3
- sempy_labs/_sql_endpoints.py +5 -3
- sempy_labs/_sqldatabase.py +5 -1
- sempy_labs/_tags.py +3 -1
- sempy_labs/_translations.py +7 -360
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_utils.py +27 -0
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +5 -0
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +145 -11
- sempy_labs/admin/__init__.py +6 -0
- sempy_labs/admin/_capacities.py +34 -11
- sempy_labs/admin/_items.py +2 -2
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/directlake/_dl_helper.py +5 -2
- sempy_labs/graph/_users.py +3 -5
- sempy_labs/lakehouse/__init__.py +4 -0
- sempy_labs/lakehouse/_helper.py +18 -9
- sempy_labs/lakehouse/_lakehouse.py +18 -9
- sempy_labs/lakehouse/_materialized_lake_views.py +76 -0
- sempy_labs/lakehouse/_shortcuts.py +8 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +38 -47
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +12 -22
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +7 -11
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -23
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_download_report.py +4 -1
- sempy_labs/report/_export_report.py +12 -5
- sempy_labs/report/_generate_report.py +11 -3
- sempy_labs/report/_paginated.py +21 -15
- sempy_labs/report/_report_functions.py +19 -11
- sempy_labs/report/_report_rebind.py +21 -10
- sempy_labs/report/_reportwrapper.py +1 -1
- sempy_labs/theme/_org_themes.py +5 -6
- sempy_labs/tom/_model.py +13 -19
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/_dax_query_view.py +0 -57
- sempy_labs/_ml_models.py +0 -111
- sempy_labs/_variable_libraries.py +0 -92
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/top_level.txt +0 -0
sempy_labs/_clear_cache.py
CHANGED
sempy_labs/_connections.py
CHANGED
@@ -1,8 +1,8 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _is_valid_uuid,
-    …
+    resolve_workspace_id,
     _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
@@ -10,8 +10,9 @@ from ._helper_functions import (
 )
 from uuid import UUID
 import sempy_labs._icons as icons
-from ._gateways import _resolve_gateway_id
+from sempy_labs._gateways import _resolve_gateway_id
 from sempy._utils._log import log
+import warnings


 @log
@@ -68,13 +69,13 @@ def delete_connection_role_assignment(connection: str | UUID, role_assignment_id
 @log
 def _resolve_connection_id(connection: str | UUID) -> UUID:

-    dfC = list_connections()
     if _is_valid_uuid(connection):
-        …
-        …
-        …
+        return connection
+
+    dfC = list_connections()
+    dfC_filt = dfC[dfC["Connection Name"] == connection]

-    if …
+    if dfC_filt.empty:
         raise ValueError(
             f"{icons.red_dot} The '{connection}' is not a valid connection."
         )
@@ -119,16 +120,20 @@ def list_connection_role_assignments(connection: str | UUID) -> pd.DataFrame:
         uses_pagination=True,
     )

+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            …
-            …
-            …
-            …
-            …
-            …
+            rows.append(
+                {
+                    "Connection Role Assignment Id": v.get("id"),
+                    "Principal Id": v.get("principal", {}).get("id"),
+                    "Principal Type": v.get("principal", {}).get("type"),
+                    "Role": v.get("role"),
+                }
+            )

-    …
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

@@ -165,51 +170,57 @@ def list_connections() -> pd.DataFrame:
         request="/v1/connections", client="fabric_sp", uses_pagination=True
     )

+    rows = []
     for r in responses:
         for i in r.get("value", []):
             connection_details = i.get("connectionDetails", {})
             credential_details = i.get("credentialDetails", {})

-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
+            rows.append(
+                {
+                    "Connection Id": i.get("id"),
+                    "Connection Name": i.get("displayName"),
+                    "Gateway Id": i.get("gatewayId"),
+                    "Connectivity Type": i.get("connectivityType"),
+                    "Connection Path": connection_details.get("path"),
+                    "Connection Type": connection_details.get("type"),
+                    "Privacy Level": i.get("privacyLevel"),
+                    "Credential Type": (
+                        credential_details.get("credentialType")
+                        if credential_details
+                        else None
+                    ),
+                    "Single Sign On Type": (
+                        credential_details.get("singleSignOnType")
+                        if credential_details
+                        else None
+                    ),
+                    "Connection Encryption": (
+                        credential_details.get("connectionEncryption")
+                        if credential_details
+                        else None
+                    ),
+                    "Skip Test Connection": (
+                        credential_details.get("skipTestConnection")
+                        if credential_details
+                        else None
+                    ),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df


 @log
 def list_item_connections(
-    …
+    item: Optional[str | UUID] = None,
+    type: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    **kwargs,
 ) -> pd.DataFrame:
     """
     Shows the list of connections that the specified item is connected to.
@@ -220,9 +231,9 @@ def list_item_connections(

     Parameters
     ----------
-    …
-        The item name.
-    …
+    item : str | uuid.UUID
+        The item name or ID.
+    type : str
         The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/update-item?tabs=HTTP#itemtype>`_.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
@@ -235,9 +246,32 @@ def list_item_connections(
         A pandas dataframe showing the list of connections that the specified item is connected to.
     """

-    …
-    …
-    …
+    if "item_name" in kwargs:
+        if item is not None:
+            raise TypeError("Cannot specify both 'item' and 'item_name'")
+        item = kwargs.pop("item_name")
+        warnings.warn(
+            "'item_name' parameter is deprecated, use 'item' instead.",
+            FutureWarning,
+            stacklevel=2,
+        )
+    if "item_type" in kwargs:
+        if type is not None:
+            raise TypeError("Cannot specify both 'type' and 'item_type'")
+        type = kwargs.pop("item_type")
+        warnings.warn(
+            "'item_type' parameter is deprecated, use 'type' instead.",
+            FutureWarning,
+            stacklevel=2,
+        )
+
+    if item is None or type is None:
+        raise TypeError(
+            "Missing required parameters: 'item' and 'type' must be provided either directly or via 'item_name' and 'item_type'."
+        )
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=item, type=type, workspace=workspace_id)

     columns = {
         "Connection Name": "string",
@@ -255,18 +289,22 @@ def list_item_connections(
         uses_pagination=True,
     )

+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            …
-            …
-            …
-            …
-            …
-            …
-            …
-            …
+            rows.append(
+                {
+                    "Connection Name": v.get("displayName"),
+                    "Connection Id": v.get("id"),
+                    "Connectivity Type": v.get("connectivityType"),
+                    "Connection Type": v.get("connectionDetails", {}).get("type"),
+                    "Connection Path": v.get("connectionDetails", {}).get("path"),
+                    "Gateway Id": v.get("gatewayId"),
+                }
+            )

-    …
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

@@ -293,10 +331,10 @@ def _list_supported_connection_types(
     url = url.rstrip("&")
     responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            …
+            rows.append(
                 {
                     "Connection Type": v.get("type"),
                     "Creation Method": v["creationMethods"][0]["name"],
@@ -310,10 +348,9 @@ def _list_supported_connection_types(
                 }
             )

-    if …
-        df = pd.DataFrame(
-        …
-        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

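The list_item_connections change above renames item_name/item_type to item/type while keeping the old keywords working behind a FutureWarning via **kwargs. A minimal usage sketch (the item and workspace names are hypothetical; this assumes the function is exported at the package top level, as most sempy_labs functions are):

import sempy_labs as labs

# New 0.12.0 parameter names.
df = labs.list_item_connections(item="Sales Model", type="SemanticModel", workspace="Contoso")

# Old keywords still resolve, but now emit a FutureWarning.
df = labs.list_item_connections(item_name="Sales Model", item_type="SemanticModel")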
sempy_labs/_dashboards.py
CHANGED
@@ -1,7 +1,7 @@
 from typing import Optional
 from uuid import UUID
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _create_dataframe,
     _base_api,
     resolve_workspace_id,
@@ -15,6 +15,8 @@ def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows a list of the dashboards within a workspace.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -42,7 +44,9 @@ def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:

     workspace_id = resolve_workspace_id(workspace)

-    response = _base_api(…
+    response = _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/dashboards", client="fabric_sp"
+    )

     rows = []
     for v in response.json().get("value", []):
sempy_labs/_data_pipelines.py
CHANGED
sempy_labs/_dataflows.py
CHANGED
sempy_labs/_dax.py
CHANGED
@@ -1,17 +1,17 @@
 import sempy.fabric as fabric
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     format_dax_object_name,
     resolve_dataset_name_and_id,
     _base_api,
     generate_guid,
 )
-from ._model_dependencies import get_model_calc_dependencies
+from sempy_labs._model_dependencies import get_model_calc_dependencies
 from typing import Optional, List, Tuple
 from sempy._utils._log import log
 from uuid import UUID
-from .directlake._warm_cache import _put_columns_into_memory
+from sempy_labs.directlake._warm_cache import _put_columns_into_memory
 import sempy_labs._icons as icons
 import time

sempy_labs/_delta_analyzer.py
CHANGED
@@ -5,7 +5,7 @@ import os
 from uuid import UUID
 from typing import Dict, Optional
 import pyarrow.parquet as pq
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     create_abfss_path,
     save_as_delta_table,
     _get_column_aggregate,
@@ -21,11 +21,11 @@ from ._helper_functions import (
     _get_delta_table,
 )
 from sempy._utils._log import log
-from .lakehouse._get_lakehouse_tables import get_lakehouse_tables
-from .lakehouse._lakehouse import (
+from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
+from sempy_labs.lakehouse._lakehouse import (
     lakehouse_attached,
 )
-from .lakehouse._helper import (
+from sempy_labs.lakehouse._helper import (
     is_v_ordered,
 )
 import sempy_labs._icons as icons
sempy_labs/_environments.py
CHANGED
sempy_labs/_eventhouses.py
CHANGED
@@ -1,12 +1,12 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
     _conv_b64,
     delete_item,
     create_item,
-    …
+    _get_item_definition,
     resolve_workspace_id,
 )
 from uuid import UUID
@@ -26,6 +26,8 @@ def create_eventhouse(

     This is a wrapper function for the following API: `Items - Create Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/create-eventhouse>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -127,6 +129,8 @@ def delete_eventhouse(name: str, workspace: Optional[str | UUID] = None):

     This is a wrapper function for the following API: `Items - Delete Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-eventhouse>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -151,6 +155,8 @@ def get_eventhouse_definition(

     This is a wrapper function for the following API: `Items - Get Eventhouse Definition <https://learn.microsoft.com/rest/api/fabric/eventhouse/items/get-eventhouse-definition>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     eventhouse : str
@@ -168,7 +174,7 @@ def get_eventhouse_definition(
         The eventhouse definition in .json format or as a pandas dataframe.
     """

-    return …
+    return _get_item_definition(
         item=eventhouse,
         type="Eventhouse",
         workspace=workspace,
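get_eventhouse_definition now delegates to the shared _get_item_definition helper rather than carrying its own request logic; calling it is unchanged. A sketch (the eventhouse and workspace names are hypothetical):

import sempy_labs as labs

definition = labs.get_eventhouse_definition(eventhouse="Telemetry", workspace="Contoso")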
sempy_labs/_eventstreams.py
CHANGED
sempy_labs/_external_data_shares.py
CHANGED

@@ -2,11 +2,13 @@ from uuid import UUID
 import pandas as pd
 from typing import Optional, List
 import sempy_labs._icons as icons
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
     resolve_item_id,
+    resolve_item_name_and_id,
+    resolve_workspace_id,
 )
 from sempy._utils._log import log

@@ -24,6 +26,8 @@ def create_external_data_share(

     This is a wrapper function for the following API: `External Data Shares - Create External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/create-external-data-share>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item_name : str
@@ -53,6 +57,7 @@ def create_external_data_share(
         method="post",
         status_codes=201,
         payload=payload,
+        client="fabric_sp",
     )
     print(
         f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{workspace_name}' workspace for the {paths} paths."
@@ -71,6 +76,8 @@ def revoke_external_data_share(

     This is a wrapper function for the following API: `External Data Shares - Revoke External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/revoke-external-data-share`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     external_data_share_id : uuid.UUID
@@ -91,6 +98,7 @@ def revoke_external_data_share(
     _base_api(
         request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke",
         method="post",
+        client="fabric_sp",
     )
     print(
         f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
@@ -106,6 +114,8 @@ def list_external_data_shares_in_item(

     This is a wrapper function for the following API: `External Data Shares - List External Data Shares In Item <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/list-external-data-shares-in-item`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item_name : str
@@ -123,7 +133,7 @@ def list_external_data_shares_in_item(
         A pandas dataframe showing a list of the external data shares that exist for the specified item.
     """

-    …
+    workspace_id = resolve_workspace_id(workspace)
     item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)

     columns = {
@@ -145,6 +155,7 @@ def list_external_data_shares_in_item(
     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
         uses_pagination=True,
+        client="fabric_sp",
     )

     rows = []
@@ -174,3 +185,46 @@ def list_external_data_shares_in_item(
     df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
+
+
+@log
+def delete_external_data_share(
+    external_data_share_id: UUID,
+    item: str | UUID,
+    item_type: str,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Deletes the specified external data share.
+
+    This is a wrapper function for the following API: `External Data Shares Provider - Delete External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares-provider/delete-external-data-share`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    external_data_share_id : uuid.UUID
+        The external data share ID.
+    item : str | uuid.UUID
+        The item name or ID.
+    item_type : str
+        The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = resolve_item_name_and_id(
+        item=item, type=item_type, workspace=workspace_id
+    )
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}",
+        method="delete",
+        client="fabric_sp",
+    )
+    print(
+        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
+    )
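The new delete_external_data_share wraps the delete endpoint with the same item and workspace resolution as the existing helpers. A sketch (the ID and names are hypothetical; this assumes the function is exported from sempy_labs like its siblings):

from sempy_labs import delete_external_data_share

delete_external_data_share(
    external_data_share_id="5b218778-e7a5-4d73-8187-f10824047715",  # hypothetical share ID
    item="SalesLakehouse",
    item_type="Lakehouse",
    workspace="Contoso",
)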
sempy_labs/_gateways.py
CHANGED
@@ -77,16 +77,16 @@ def list_gateways() -> pd.DataFrame:
 @log
 def _resolve_gateway_id(gateway: str | UUID) -> UUID:

-    dfG = list_gateways()
     if _is_valid_uuid(gateway):
-        …
+        return gateway
     else:
+        dfG = list_gateways()
         dfG_filt = dfG[dfG["Gateway Name"] == gateway]

-        …
-        …
+        if dfG_filt.empty:
+            raise ValueError(f"{icons.red_dot} The '{gateway}' gateway does not exist.")

-        …
+        return dfG_filt["Gateway Id"].iloc[0]


 @log
@@ -472,7 +472,10 @@ def update_vnet_gateway(

 @log
 def bind_semantic_model_to_gateway(
-    dataset: str | UUID, …
+    dataset: str | UUID,
+    gateway: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    data_source_object_ids: Optional[list[UUID]] = None,
 ):
     """
     Binds the specified dataset from the specified workspace to the specified gateway.
@@ -488,9 +491,11 @@ def bind_semantic_model_to_gateway(
     gateway : str | uuid.UUID
         The name or ID of the gateway.
     workspace : str | uuid.UUID, default=None
-        The …
+        The workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    data_source_object_ids : list[uuid.UUID], default=None
+        A list of data source object IDs to bind to the gateway.
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -502,6 +507,8 @@ def bind_semantic_model_to_gateway(
     payload = {
         "gatewayObjectId": gateway_id,
     }
+    if data_source_object_ids is not None:
+        payload["datasourceObjectIds"] = data_source_object_ids

     _base_api(
         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/Default.BindToGateway",