semantic-link-labs 0.11.2__py3-none-any.whl → 0.12.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/METADATA +7 -6
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/RECORD +90 -84
- sempy_labs/__init__.py +18 -18
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_authentication.py +81 -32
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +4 -4
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +107 -70
- sempy_labs/_dashboards.py +6 -2
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +1 -1
- sempy_labs/_dax.py +3 -3
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +1 -1
- sempy_labs/_eventhouses.py +9 -3
- sempy_labs/_eventstreams.py +1 -1
- sempy_labs/_external_data_shares.py +56 -2
- sempy_labs/_gateways.py +14 -7
- sempy_labs/_generate_semantic_model.py +7 -12
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +1 -1
- sempy_labs/_helper_functions.py +293 -22
- sempy_labs/_job_scheduler.py +12 -1
- sempy_labs/_kql_databases.py +1 -1
- sempy_labs/_kql_querysets.py +10 -2
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_labels.py +126 -0
- sempy_labs/_list_functions.py +2 -2
- sempy_labs/_managed_private_endpoints.py +1 -1
- sempy_labs/_mirrored_databases.py +40 -16
- sempy_labs/_mirrored_warehouses.py +1 -1
- sempy_labs/_ml_experiments.py +1 -1
- sempy_labs/_model_bpa.py +6 -6
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +3 -3
- sempy_labs/_notebooks.py +153 -3
- sempy_labs/_query_scale_out.py +2 -2
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +15 -3
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +3 -3
- sempy_labs/_sql_endpoints.py +5 -3
- sempy_labs/_sqldatabase.py +5 -1
- sempy_labs/_tags.py +3 -1
- sempy_labs/_translations.py +7 -360
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_utils.py +27 -0
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +5 -0
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +145 -11
- sempy_labs/admin/__init__.py +6 -0
- sempy_labs/admin/_capacities.py +34 -11
- sempy_labs/admin/_items.py +2 -2
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/directlake/_dl_helper.py +5 -2
- sempy_labs/graph/_users.py +3 -5
- sempy_labs/lakehouse/__init__.py +4 -0
- sempy_labs/lakehouse/_helper.py +18 -9
- sempy_labs/lakehouse/_lakehouse.py +18 -9
- sempy_labs/lakehouse/_materialized_lake_views.py +76 -0
- sempy_labs/lakehouse/_shortcuts.py +8 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +38 -47
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +12 -22
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +7 -11
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -23
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_download_report.py +4 -1
- sempy_labs/report/_export_report.py +12 -5
- sempy_labs/report/_generate_report.py +11 -3
- sempy_labs/report/_paginated.py +21 -15
- sempy_labs/report/_report_functions.py +19 -11
- sempy_labs/report/_report_rebind.py +21 -10
- sempy_labs/report/_reportwrapper.py +1 -1
- sempy_labs/theme/_org_themes.py +5 -6
- sempy_labs/tom/_model.py +13 -19
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/_dax_query_view.py +0 -57
- sempy_labs/_ml_models.py +0 -111
- sempy_labs/_variable_libraries.py +0 -92
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/top_level.txt +0 -0
sempy_labs/_generate_semantic_model.py CHANGED

```diff
@@ -4,7 +4,7 @@ import json
 import os
 from typing import Optional, List
 from sempy._utils._log import log
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     _conv_b64,
@@ -13,9 +13,9 @@ from ._helper_functions import (
     _mount,
     resolve_workspace_id,
 )
-from .lakehouse._lakehouse import lakehouse_attached
+from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
-from ._refresh_semantic_model import refresh_semantic_model
+from sempy_labs._refresh_semantic_model import refresh_semantic_model
 from uuid import UUID
 
 
@@ -284,13 +284,9 @@ def deploy_semantic_model(
         source_workspace
     )
 
-    if target_workspace is None:
-        target_workspace_name = source_workspace_name
-        target_workspace_id = source_workspace_id
-    else:
-        (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
-            target_workspace
-        )
+    (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
+        target_workspace
+    )
 
     if target_dataset is None:
         target_dataset = source_dataset
@@ -306,13 +302,12 @@ def deploy_semantic_model(
 
     dfD = fabric.list_datasets(workspace=target_workspace_id, mode="rest")
     dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]
-    if len(dfD_filt) > 0 and not overwrite:
+    if not dfD_filt.empty and not overwrite:
         raise ValueError(
             f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace_name}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
         )
 
     if perspective is not None:
-
         from sempy_labs.tom import connect_semantic_model
 
         with connect_semantic_model(
```
sempy_labs/_git.py CHANGED

sempy_labs/_graphQL.py CHANGED

sempy_labs/_helper_functions.py CHANGED
```diff
@@ -20,6 +20,8 @@ import sempy_labs._authentication as auth
 from jsonpath_ng.ext import parse
 from jsonpath_ng.jsonpath import Fields, Index
 from sempy._utils._log import log
+from os import PathLike
+import sempy_labs._utils as utils
 
 
 def _build_url(url: str, params: dict) -> str:
@@ -225,8 +227,6 @@ def delete_item(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    from sempy_labs._utils import item_types
-
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (item_name, item_id) = resolve_item_name_and_id(item, type, workspace_id)
     item_type = item_types.get(type)[0].lower()
@@ -238,6 +238,18 @@ def delete_item(
     )
 
 
+@log
+def create_folder_if_not_exists(
+    folder: str | PathLike, workspace: Optional[str | UUID] = None
+) -> UUID:
+    try:
+        x = fabric.resolve_folder_id(folder=folder, workspace=workspace)
+    except:
+        x = fabric.create_folder(folder=folder, workspace=workspace)
+
+    return x
+
+
 @log
 def create_item(
     name: str,
```
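The new `create_folder_if_not_exists` is a small idempotent helper: it tries to resolve the folder's ID and falls back to creating it (the bare `except` means any resolution failure triggers creation). A usage sketch, with hypothetical folder and workspace values:

```python
from sempy_labs._helper_functions import create_folder_if_not_exists

# Returns the folder's UUID, creating "Reports/Finance" only if it is missing.
folder_id = create_folder_if_not_exists(
    folder="Reports/Finance", workspace="My Workspace"
)
```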
```diff
@@ -245,6 +257,7 @@ def create_item(
     description: Optional[str] = None,
     definition: Optional[dict] = None,
     workspace: Optional[str | UUID] = None,
+    folder: Optional[str | PathLike] = None,
 ):
     """
     Creates an item in a Fabric workspace.
@@ -263,12 +276,14 @@ def create_item(
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    folder : str | os.PathLike, default=None
+        The folder within the workspace where the item will be created.
+        Defaults to None which places the item in the root of the workspace.
     """
-    from sempy_labs._utils import item_types
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_type = item_types.get(type)[0].lower()
-    item_type_url = item_types.get(type)[1]
+    item_type = utils.item_types.get(type)[0].lower()
+    item_type_url = utils.item_types.get(type)[1]
 
     payload = {
         "displayName": name,
@@ -277,6 +292,10 @@ def create_item(
         payload["description"] = description
     if definition:
         payload["definition"] = definition
+    if folder:
+        payload["folderId"] = create_folder_if_not_exists(
+            folder=folder, workspace=workspace_id
+        )
 
     _base_api(
         request=f"/v1/workspaces/{workspace_id}/{item_type_url}",
```
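Together these hunks let `create_item` place new items into a workspace folder, creating the folder on demand via the helper above. A sketch (the item and folder names are hypothetical):

```python
from sempy_labs._helper_functions import create_item

# The new folder parameter resolves or creates the folder and passes its
# ID to the Fabric create-item API as "folderId".
create_item(
    name="Sales Notebook",
    type="Notebook",
    workspace="My Workspace",
    folder="Notebooks/Sales",
)
```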
```diff
@@ -290,8 +309,249 @@ def create_item(
     )
 
 
+@log
+def copy_item(
+    item: str | UUID,
+    type: str,
+    target_name: Optional[str] = None,
+    source_workspace: Optional[str | UUID] = None,
+    target_workspace: Optional[str | UUID] = None,
+    overwrite: bool = False,
+    keep_existing_bindings: bool = False,
+):
+    """
+    Copies an item (with its definition) from one location to another location.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item to be copied.
+    type : str
+        The type of the item.
+    target_name: str, default=None
+        The name of the item in the target workspace. Defaults to the same name as the source item.
+    source_workspace : str | uuid.UUID, default=None
+        The workspace name or ID in which the item exists.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    target_workspace : str | uuid.UUID, default=None
+        The workspace name or ID to which the item will be copied.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    overwrite : bool, default=False
+        If True, overwrites the item in the target workspace if it already exists.
+    keep_existing_bindings : bool, default=False
+        If True, ensures that reports are re-bound to the original semantic model.
+        If False, reports are binded to the semantic model to which the item is bound.
+    """
+
+    from sempy_labs.report import report_rebind
+
+    (item_name, item_id) = resolve_item_name_and_id(
+        item=item, type=type, workspace=source_workspace
+    )
+    (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
+        source_workspace
+    )
+    (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
+        target_workspace
+    )
+
+    if target_name is None:
+        target_name = item_name
+
+    if source_workspace_id == target_workspace_id and target_name == item_name:
+        raise ValueError(
+            f"{icons.red_dot} The source and target workspaces are the same and the target name is the same as the source name. No action taken."
+        )
+
+    type_url = utils.items.get(type)
+    result = _base_api(
+        request=f"v1/workspaces/{source_workspace_id}/{type_url}/{item_id}",
+        client="fabric_sp",
+    )
+    description = result.json().get("description")
+
+    payload = get_item_definition(
+        item=item_id,
+        type=type,
+        workspace=source_workspace_id,
+        return_dataframe=False,
+        decode=False,
+    )
+    payload["displayName"] = target_name
+    if description:
+        payload["description"] = description
+
+    # Check if item exists in target workspace
+    exists = False
+    try:
+        target_item_id = resolve_item_id(
+            item=target_name, type=type, workspace=target_workspace_id
+        )
+        exists = True
+    except Exception:
+        exists = False
+
+    if exists and not overwrite:
+        raise ValueError(
+            f"{icons.warning} The item '{target_name}' of type '{type}' already exists in the target workspace '{target_workspace_name}' and overwrite is set to False."
+        )
+    elif exists and overwrite:
+        # Update item definition
+        print(
+            f"{icons.in_progress} Updating existing item '{target_name}' of type '{type}' in the target workspace '{target_workspace_name}'..."
+        )
+        # Get the existing source model
+        if type == "Report" and keep_existing_bindings:
+            result = _base_api(
+                request=f"v1.0/myorg/groups/{target_workspace_id}/reports/{target_item_id}"
+            ).json()
+            dataset_id = result.get("datasetId")
+            dataset_workspace_id = result.get("datasetWorkspaceId")
+        _base_api(
+            request=f"/v1/workspaces/{target_workspace_id}/{type_url}/{target_item_id}/updateDefinition",
+            method="post",
+            client="fabric_sp",
+            payload=payload,
+            lro_return_status_code=True,
+            status_codes=None,
+        )
+        print(
+            f"{icons.green_dot} The item '{target_name}' of type '{type}' has been successfully updated in the target workspace '{target_workspace_name}'."
+        )
+
+        if keep_existing_bindings:
+            report_rebind(
+                report=target_item_id,
+                dataset=dataset_id,
+                report_workspace=target_workspace,
+                dataset_workspace=dataset_workspace_id,
+            )
+
+    else:
+        print(
+            f"{icons.in_progress} Creating new item '{target_name}' of type '{type}' in the target workspace '{target_workspace_name}'..."
+        )
+        create_item(
+            name=target_name,
+            type=type,
+            definition=payload["definition"],
+            workspace=target_workspace_id,
+        )
+
+
+@log
+def is_base64(s):
+    try:
+        # Add padding if needed
+        s_padded = s + "=" * (-len(s) % 4)
+        decoded = base64.b64decode(s_padded, validate=True)
+        # Optional: check if re-encoding gives the original (excluding padding)
+        return base64.b64encode(decoded).decode().rstrip("=") == s.rstrip("=")
+    except Exception:
+        return False
+
+
+@log
+def decode_payload(payload):
+
+    if is_base64(payload):
+        try:
+            decoded_payload = json.loads(base64.b64decode(payload).decode("utf-8"))
+        except Exception:
+            decoded_payload = base64.b64decode(payload)
+    elif isinstance(payload, dict):
+        decoded_payload = payload
+    else:
+        raise ValueError("Payload must be a dictionary or a base64 encoded value.")
+
+    return decoded_payload
+
+
 @log
 def get_item_definition(
+    item: str | UUID,
+    type: str,
+    workspace: Optional[str | UUID] = None,
+    return_dataframe: bool = False,
+    decode: bool = True,
+    format: Optional[str] = None,
+) -> dict | pd.DataFrame:
+    """
+    Gets a Fabric item's defintion.
+
+    This is a wrapper function for the following API: `<https://learn.microsoft.com/rest/api/fabric/core/items/get-item-definition>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item to be copied.
+    type : str
+        The `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_ of the item.
+    target_name: str, default=None
+        The name of the item in the target workspace. Defaults to the same name as the source item.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    return_dataframe : bool, default=False
+        If True, returns a pandas dataframe.
+        If False, returns a dictionary.
+    decode : bool, default=True
+        If True, decodes the base64 payload.
+    format : str, default=None
+        The `format <https://learn.microsoft.com/rest/api/fabric/core/items/get-item-definition?tabs=HTTP#itemdefinition>`_ of the item definition.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=item, type=type, workspace=workspace_id)
+
+    item_type_url = utils.items.get(type)
+    if not item_type_url:
+        raise ValueError(f"{icons.red_dot} Invalid item type '{type}'.")
+
+    url = f"/v1/workspaces/{workspace_id}/{item_type_url}/{item_id}/getDefinition"
+    if format:
+        url += f"?format={format}"
+
+    result = _base_api(
+        request=url,
+        method="post",
+        status_codes=None,
+        lro_return_json=True,
+    )
+
+    if return_dataframe:
+        return pd.json_normalize(result["definition"]["parts"]).rename(
+            columns={
+                "path": "Path",
+                "payload": "Payload",
+                "payloadType": "Payload Type",
+            }
+        )
+
+    definition = {"definition": {"parts": []}}
+    if decode:
+        for part in result.get("definition", {}).get("parts", []):
+            path = part.get("path")
+            payload = part.get("payload")
+            decoded_payload = decode_payload(payload)
+
+            # Keep structure similar to original but replace payload with decoded version
+            definition["definition"]["parts"].append(
+                {"path": path, "payload": decoded_payload}
+            )
+    else:
+        return result
+
+
+@log
+def _get_item_definition(
     item: str | UUID,
     type: str,
     workspace: Optional[str | UUID] = None,
@@ -299,12 +559,11 @@ def get_item_definition(
     return_dataframe: bool = True,
     decode: bool = True,
 ):
-    from sempy_labs._utils import item_types
 
     workspace_id = resolve_workspace_id(workspace)
    item_id = resolve_item_id(item, type, workspace_id)
-    item_type_url = item_types.get(type)[1]
-    path = item_types.get(type)[2]
+    item_type_url = utils.item_types.get(type)[1]
+    path = utils.item_types.get(type)[2]
 
     url = f"/v1/workspaces/{workspace_id}/{item_type_url}/{item_id}/getDefinition"
     if format:
```
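The net effect of these two hunks: the old `get_item_definition` becomes the private `_get_item_definition`, and a new public `get_item_definition` (plus `copy_item`, `is_base64`, and `decode_payload`) is layered on top. A sketch of the two new entry points (the item and workspace names are hypothetical):

```python
from sempy_labs._helper_functions import copy_item, get_item_definition

# Copy a report across workspaces, keeping it bound to its original model.
copy_item(
    item="Sales Report",
    type="Report",
    source_workspace="Dev Workspace",
    target_workspace="Prod Workspace",
    overwrite=True,
    keep_existing_bindings=True,
)

# Inspect a model's definition parts as a dataframe instead of a dict.
df = get_item_definition(
    item="Sales Model",
    type="SemanticModel",
    workspace="Dev Workspace",
    return_dataframe=True,
)
```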
```diff
@@ -825,7 +1084,9 @@ def resolve_workspace_id(
 
 
 @log
-def resolve_workspace_name(workspace_id: Optional[UUID] = None) -> str:
+def resolve_workspace_name(
+    workspace_id: Optional[UUID] = None, throw_error: bool = True
+) -> str:
 
     if workspace_id is None:
         workspace_id = _get_fabric_context_setting(name="trident.workspace.id")
@@ -835,9 +1096,12 @@ def resolve_workspace_name(workspace_id: Optional[UUID] = None) -> str:
             request=f"/v1/workspaces/{workspace_id}", client="fabric_sp"
         ).json()
     except FabricHTTPException:
-        raise ValueError(
-            f"{icons.red_dot} The '{workspace_id}' workspace was not found."
-        )
+        if throw_error:
+            raise ValueError(
+                f"{icons.red_dot} The '{workspace_id}' workspace was not found."
+            )
+        else:
+            return workspace_id
 
     return response.get("displayName")
 
```
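`resolve_workspace_name` gains a `throw_error` flag: when it is False, an unresolvable ID is returned unchanged rather than raising, so callers can fall back gracefully. For example (the ID is a placeholder):

```python
from sempy_labs._helper_functions import resolve_workspace_name

name = resolve_workspace_name(
    workspace_id="00000000-0000-0000-0000-000000000000",  # placeholder ID
    throw_error=False,  # returns the ID itself if the lookup fails
)
```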
```diff
@@ -997,15 +1261,15 @@ def _decode_b64(file, format: Optional[str] = "utf-8"):
 
 @log
 def is_default_semantic_model(
-    dataset: str, workspace: Optional[str | UUID] = None
+    dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> bool:
     """
     Identifies whether a semantic model is a default semantic model.
 
     Parameters
     ----------
-    dataset : str
-        The name of the semantic model.
+    dataset : str | uuid.UUID
+        The name or ID of the semantic model.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -1017,7 +1281,10 @@ def is_default_semantic_model(
         A True/False value indicating whether the semantic model is a default semantic model.
     """
 
-
+    workspace_id = resolve_workspace_id(workspace)
+    (dataset_name, dataset_id) = resolve_item_name_and_id(
+        item=dataset, type="SemanticModel", workspace=workspace_id
+    )
 
     dfI = fabric.list_items(workspace=workspace_id)
     filtered_df = dfI.groupby("Display Name").filter(
@@ -1026,7 +1293,7 @@ def is_default_semantic_model(
     )
     default_semantic_models = filtered_df["Display Name"].unique().tolist()
 
-    return dataset in default_semantic_models
+    return dataset_name in default_semantic_models
 
 
 @log
```
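`is_default_semantic_model` now accepts an ID as well as a name and resolves it before the membership check against the workspace's default models. A sketch (names are hypothetical):

```python
from sempy_labs._helper_functions import is_default_semantic_model

if is_default_semantic_model(dataset="Sales Model", workspace="My Workspace"):
    print("Default semantic model; skipping redeployment.")
```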
```diff
@@ -1926,17 +2193,21 @@ def _base_api(
     if (lro_return_json or lro_return_status_code) and status_codes is None:
         status_codes = [200, 202]
 
-
-
+    class FabricDefaultCredential(TokenCredential):
+
+        def get_token(self, *scopes, **kwargs) -> AccessToken:
+            from sempy.fabric._credentials import build_access_token
+
+            return build_access_token(notebookutils.credentials.getToken("pbi"))
 
     if isinstance(status_codes, int):
         status_codes = [status_codes]
 
     if client == "fabric":
-        c = fabric.FabricRestClient(
+        c = fabric.FabricRestClient(credential=FabricDefaultCredential())
     elif client == "fabric_sp":
-        token = auth.token_provider.get() or
-        c = fabric.FabricRestClient(
+        token = auth.token_provider.get() or FabricDefaultCredential()
+        c = fabric.FabricRestClient(credential=token)
     elif client in ["azure", "graph"]:
         pass
     else:
```
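`_base_api` now builds its clients around `FabricDefaultCredential`, which adapts notebook-acquired tokens to the `azure.core` `TokenCredential` protocol that the client accepts. For reference, any object with a compatible `get_token` satisfies the protocol; a minimal static-token sketch (the token value is a placeholder, not how the library acquires tokens):

```python
from azure.core.credentials import AccessToken, TokenCredential


class StaticTokenCredential(TokenCredential):
    """Serves a single pre-acquired bearer token (e.g., for tests)."""

    def __init__(self, token: str, expires_on: int):
        self._token = AccessToken(token, expires_on)

    def get_token(self, *scopes, **kwargs) -> AccessToken:
        # Real credentials would refresh based on scopes and expiry;
        # this one simply returns the stored token.
        return self._token
```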
sempy_labs/_job_scheduler.py CHANGED
```diff
@@ -1,7 +1,7 @@
 from sempy._utils._log import log
 import pandas as pd
 from typing import Optional, List
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_item_name_and_id,
     _update_dataframe_datatypes,
@@ -22,6 +22,8 @@ def list_item_job_instances(
 
     This is a wrapper function for the following API: `Job Scheduler - List Item Job Instances <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/list-item-job-instances>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item : str | uuid.UUID
@@ -62,6 +64,7 @@ def list_item_job_instances(
     responses = _base_api(
         request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances",
         uses_pagination=True,
+        client="fabric_sp",
     )
 
     if not responses[0].get("value"):
@@ -185,6 +188,9 @@ def list_item_schedules(
         "Times": "string",
         "Owner Id": "string",
         "Owner Type": "string",
+        "Recurrence": "int",
+        "Occurrence Type": "string",
+        "Occurrence Day of Month": "int",
     }
     df = _create_dataframe(columns=columns)
 
@@ -210,6 +216,11 @@ def list_item_schedules(
                 "Times": config.get("times"),
                 "Owner Id": own.get("id"),
                 "Owner Type": own.get("type"),
+                "Recurrence": config.get("recurrence"),
+                "Occurrence Type": config.get("occurence", {}).get("occurrenceType"),
+                "Occurrence Day of Month": config.get("occurrence", {}).get(
+                    "dayOfMonth"
+                ),
             }
         )
 
```
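Besides the absolute-import cleanup, `list_item_job_instances` now goes through the `fabric_sp` client, and `list_item_schedules` surfaces three extra recurrence columns. With the service-principal support the docstrings now advertise, a call might look like this (a sketch; the Key Vault secret names are placeholders, and the supported patterns are shown in the linked Service Principal notebook):

```python
import sempy_labs as labs

with labs.service_principal_authentication(
    key_vault_uri="https://contoso.vault.azure.net/",
    key_vault_tenant_id="tenant-id-secret",
    key_vault_client_id="client-id-secret",
    key_vault_client_secret="client-secret-secret",
):
    df = labs.list_item_job_instances(
        item="Nightly Load", type="DataPipeline", workspace="My Workspace"
    )
```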
sempy_labs/_kql_databases.py CHANGED

sempy_labs/_kql_querysets.py CHANGED
```diff
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -19,6 +19,8 @@ def list_kql_querysets(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Items - List KQL Querysets <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/list-kql-querysets>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -42,7 +44,9 @@ def list_kql_querysets(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     workspace_id = resolve_workspace_id(workspace)
 
     responses = _base_api(
-        request=f"v1/workspaces/{workspace_id}/kqlQuerysets",
+        request=f"v1/workspaces/{workspace_id}/kqlQuerysets",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     rows = []
@@ -71,6 +75,8 @@ def create_kql_queryset(
 
     This is a wrapper function for the following API: `Items - Create KQL Queryset <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/create-kql-queryset>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -97,6 +103,8 @@ def delete_kql_queryset(
 
     This is a wrapper function for the following API: `Items - Delete KQL Queryset <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/delete-kql-queryset>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     kql_queryset: str | uuid.UUID
```
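The KQL queryset listing now paginates through all result pages and runs under service principal authentication; the create and delete docstrings note the same support. The call itself is unchanged (the workspace name is hypothetical):

```python
import sempy_labs as labs

# Returns every KQL queryset in the workspace, across all result pages.
df = labs.list_kql_querysets(workspace="My Workspace")
```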
sempy_labs/_kusto.py CHANGED
```diff
@@ -5,8 +5,8 @@ from sempy._utils._log import log
 import sempy_labs._icons as icons
 from typing import Optional
 from uuid import UUID
-from ._kql_databases import _resolve_cluster_uri
-from ._helper_functions import resolve_item_id
+from sempy_labs._kql_databases import _resolve_cluster_uri
+from sempy_labs._helper_functions import resolve_item_id
 
 
 @log
```
|