semantic-link-labs 0.8.3__py3-none-any.whl → 0.8.5__py3-none-any.whl
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/METADATA +38 -8
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/RECORD +109 -104
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +53 -1
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
- sempy_labs/_capacities.py +49 -14
- sempy_labs/_capacity_migration.py +1 -7
- sempy_labs/_data_pipelines.py +6 -0
- sempy_labs/_dataflows.py +118 -1
- sempy_labs/_dax.py +189 -3
- sempy_labs/_deployment_pipelines.py +13 -7
- sempy_labs/_environments.py +6 -0
- sempy_labs/_eventhouses.py +6 -0
- sempy_labs/_eventstreams.py +6 -0
- sempy_labs/_external_data_shares.py +6 -4
- sempy_labs/_generate_semantic_model.py +26 -3
- sempy_labs/_git.py +14 -14
- sempy_labs/_helper_functions.py +197 -1
- sempy_labs/_icons.py +55 -22
- sempy_labs/_kql_databases.py +6 -0
- sempy_labs/_kql_querysets.py +6 -0
- sempy_labs/_list_functions.py +1 -1
- sempy_labs/_managed_private_endpoints.py +166 -0
- sempy_labs/_mirrored_databases.py +428 -0
- sempy_labs/_mirrored_warehouses.py +2 -0
- sempy_labs/_ml_experiments.py +6 -0
- sempy_labs/_ml_models.py +7 -1
- sempy_labs/_model_bpa.py +215 -181
- sempy_labs/_model_bpa_bulk.py +46 -42
- sempy_labs/_model_bpa_rules.py +8 -3
- sempy_labs/_model_dependencies.py +41 -87
- sempy_labs/_notebooks.py +107 -12
- sempy_labs/_query_scale_out.py +8 -6
- sempy_labs/_refresh_semantic_model.py +299 -49
- sempy_labs/_spark.py +12 -5
- sempy_labs/_translations.py +2 -0
- sempy_labs/_vertipaq.py +89 -86
- sempy_labs/_warehouses.py +79 -0
- sempy_labs/_workloads.py +128 -0
- sempy_labs/_workspace_identity.py +4 -4
- sempy_labs/_workspaces.py +14 -1
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_basic_functions.py +131 -43
- sempy_labs/admin/_domains.py +18 -18
- sempy_labs/directlake/__init__.py +2 -0
- sempy_labs/directlake/_directlake_schema_sync.py +2 -1
- sempy_labs/directlake/_dl_helper.py +4 -1
- sempy_labs/directlake/_get_shared_expression.py +7 -1
- sempy_labs/directlake/_guardrails.py +2 -1
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -7
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
- sempy_labs/directlake/_warm_cache.py +10 -9
- sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
- sempy_labs/lakehouse/_shortcuts.py +4 -0
- sempy_labs/migration/_create_pqt_file.py +5 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
- sempy_labs/migration/_migration_validation.py +2 -0
- sempy_labs/migration/_refresh_calc_tables.py +1 -0
- sempy_labs/report/__init__.py +6 -1
- sempy_labs/report/_download_report.py +75 -0
- sempy_labs/report/_generate_report.py +6 -0
- sempy_labs/report/_paginated.py +74 -0
- sempy_labs/report/_report_functions.py +6 -0
- sempy_labs/report/_report_rebind.py +2 -0
- sempy_labs/report/_reportwrapper.py +4 -2
- sempy_labs/tom/_model.py +135 -68
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/top_level.txt +0 -0
sempy_labs/_capacities.py
CHANGED
@@ -37,6 +37,8 @@ def create_fabric_capacity(
     """
     This function creates a new Fabric capacity within an Azure subscription.
 
+    This is a wrapper function for the following API: `Fabric Capacities - Create Or Update <https://learn.microsoft.com/rest/api/microsoftfabric/fabric-capacities/create-or-update?view=rest-microsoftfabric-2023-11-01>`_.
+
     Parameters
     ----------
     capacity_name : str

@@ -253,6 +255,8 @@ def suspend_fabric_capacity(
     """
     This function suspends a Fabric capacity.
 
+    This is a wrapper function for the following API: `Fabric Capacities - Suspend <https://learn.microsoft.com/rest/api/microsoftfabric/fabric-capacities/suspend?view=rest-microsoftfabric-2023-11-01>`_.
+
     Parameters
     ----------
     capacity_name : str

@@ -286,7 +290,7 @@ def suspend_fabric_capacity(
     if response.status_code != 202:
         raise FabricHTTPException(response)
 
-    print(f"{icons.green_dot} The '{capacity_name} capacity has been suspended.")
+    print(f"{icons.green_dot} The '{capacity_name}' capacity has been suspended.")
 
 
 def resume_fabric_capacity(

@@ -301,6 +305,8 @@ def resume_fabric_capacity(
     """
     This function resumes a Fabric capacity.
 
+    This is a wrapper function for the following API: `Fabric Capacities - Resume <https://learn.microsoft.com/rest/api/microsoftfabric/fabric-capacities/resume?view=rest-microsoftfabric-2023-11-01>`_.
+
     Parameters
     ----------
     capacity_name : str

@@ -335,7 +341,7 @@ def resume_fabric_capacity(
     if response.status_code != 202:
         raise FabricHTTPException(response)
 
-    print(f"{icons.green_dot} The '{capacity_name} capacity has been resumed.")
+    print(f"{icons.green_dot} The '{capacity_name}' capacity has been resumed.")
 
 
 def delete_embedded_capacity(

@@ -384,7 +390,7 @@ def delete_embedded_capacity(
     if response.status_code not in [200, 202]:
         raise FabricHTTPException(response)
 
-    print(f"{icons.green_dot} The '{capacity_name} capacity has been deleted.")
+    print(f"{icons.green_dot} The '{capacity_name}' capacity has been deleted.")
 
 
 def delete_premium_capacity(capacity_name: str):

@@ -427,6 +433,8 @@ def delete_fabric_capacity(
     """
     This function deletes a Fabric capacity.
 
+    This is a wrapper function for the following API: `Fabric Capacities - Delete <https://learn.microsoft.com/rest/api/microsoftfabric/fabric-capacities/delete?view=rest-microsoftfabric-2023-11-01>`_.
+
     Parameters
     ----------
     capacity_name : str

@@ -461,7 +469,7 @@ def delete_fabric_capacity(
     if response.status_code != 202:
         raise FabricHTTPException(response)
 
-    print(f"{icons.green_dot} The '{capacity_name} capacity has been deleted.")
+    print(f"{icons.green_dot} The '{capacity_name}' capacity has been deleted.")
 
 
 def update_fabric_capacity(

@@ -479,6 +487,8 @@ def update_fabric_capacity(
     """
     This function updates a Fabric capacity's properties.
 
+    This is a wrapper function for the following API: `Fabric Capacities - Update <https://learn.microsoft.com/rest/api/microsoftfabric/fabric-capacities/update?view=rest-microsoftfabric-2023-11-01>`_.
+
     Parameters
     ----------
     capacity_name : str

@@ -521,26 +531,47 @@ def update_fabric_capacity(
 
     url = f"https://management.azure.com/subscriptions/{azure_subscription_id}/resourceGroups/{resource_group}/providers/Microsoft.Fabric/capacities/{capacity_name}?api-version={icons.azure_api_version}"
 
+    get_response = requests.get(url, headers=headers)
+    if get_response.status_code != 200:
+        raise FabricHTTPException(get_response)
+
+    get_json = get_response.json()
+    current_sku = get_json.get("sku", {}).get("name")
+    current_admins = (
+        get_json.get("properties", {}).get("administration", {}).get("members")
+    )
+    current_tags = get_json.get("tags")
+
     payload = {}
+    payload["sku"] = {
+        "name": current_sku,
+        "tier": "Fabric",
+    }
+    payload["tags"] = current_tags
+    payload["properties"] = get_json["properties"]
+
     if sku is not None:
-        payload["sku"]
+        payload["sku"]["name"] = sku
     if admin_members is not None:
-        payload["properties"] = {"administration": {"members":
-
-
+        payload["properties"] = {"administration": {"members": admin_members}}
+    if tags is not None:
+        payload["tags"] = tags
 
-    if
-
-
+    # Do not proceed if no properties are being changed
+    if current_sku == sku and current_admins == admin_members and current_tags == tags:
+        print(
+            f"{icons.yellow_dot} The properties of the '{capacity_name}' are the same as those specified in the parameters of this function. No changes have been made."
         )
+        return
 
-
+    payload = _add_sll_tag(payload, tags)
+    response = requests.patch(url, headers=headers, json=payload)
 
     if response.status_code != 202:
         raise FabricHTTPException(response)
 
     print(
-        f"{icons.green_dot} The '{capacity_name} capacity has been updated accordingly."
+        f"{icons.green_dot} The '{capacity_name}' capacity has been updated accordingly."
     )
 
 

@@ -556,6 +587,8 @@ def check_fabric_capacity_name_availablility(
     """
     This function updates a Fabric capacity's properties.
 
+    This is a wrapper function for the following API: `Fabric Capacities - Check Name Availability <https://learn.microsoft.com/rest/api/microsoftfabric/fabric-capacities/check-name-availability?view=rest-microsoftfabric-2023-11-01>`_.
+
     Parameters
     ----------
     capacity_name : str

@@ -611,6 +644,8 @@ def create_resource_group(
     """
     This function creates a resource group in a region within an Azure subscription.
 
+    This is a wrapper function for the following API: `ResourceGroupsOperations Class - CreateOrUpdate <https://learn.microsoft.com/python/api/azure-mgmt-resource/azure.mgmt.resource.resources.v2022_09_01.operations.resourcegroupsoperations?view=azure-python#azure-mgmt-resource-resources-v2022-09-01-operations-resourcegroupsoperations-create-or-update>`_.
+
     Parameters
     ----------
     azure_subscription_id : str

@@ -631,7 +666,7 @@ def create_resource_group(
 
     from azure.mgmt.resource import ResourceManagementClient
 
-    azure_token, credential, headers =
+    azure_token, credential, headers = _get_azure_token_credentials(
         key_vault_uri=key_vault_uri,
         key_vault_tenant_id=key_vault_tenant_id,
         key_vault_client_id=key_vault_client_id,
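Taken together, the update_fabric_capacity changes mean the function now reads the deployed configuration first and only issues a PATCH when something would actually change. A minimal usage sketch follows; all names and GUIDs are placeholders, and the key-vault parameter names are assumed from the `_get_azure_token_credentials(` call visible in the create_resource_group hunk above rather than from a full signature in this diff:

    import sempy_labs as labs

    # Placeholder values throughout; the key-vault kwargs are assumptions, not confirmed by this diff.
    labs.update_fabric_capacity(
        capacity_name="fabriccapacity1",
        azure_subscription_id="00000000-0000-0000-0000-000000000000",
        resource_group="my-resource-group",
        sku="F128",  # as of 0.8.5, a PATCH is sent only if sku/admins/tags differ from the deployed values
        key_vault_uri="https://mykeyvault.vault.azure.net/",
        key_vault_tenant_id="<tenant-id>",
        key_vault_client_id="<client-id>",
        key_vault_client_secret="<secret-name>",
    )

If the requested sku, admin_members, and tags all equal the deployed values, the function now prints a yellow-dot message and returns without calling the Azure API.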
sempy_labs/_capacity_migration.py
CHANGED

@@ -632,7 +632,6 @@ def migrate_fabric_trial_capacity(
     source_capacity: str,
     target_capacity: str,
     target_capacity_sku: str = "F64",
-    target_capacity_region: Optional[str] = None,
     target_capacity_admin_members: Optional[str | List[str]] = None,
 ):
     """

@@ -658,9 +657,6 @@ def migrate_fabric_trial_capacity(
         The name of the new Fabric capacity (F SKU). If this capacity does not exist, it will be created.
     target_capacity_sku : str, default="F64"
         If the target capacity does not exist, this property sets the SKU size for the target capacity.
-    target_capacity_region : str, default=None
-        If the target capacity does not exist, this property sets the region for the target capacity.
-        Defaults to None which resolves to the region in which the Trial SKU exists.
     target_capacity_admin_members : str, default=None
         If the target capacity does not exist, this property sets the admin members for the target capacity.
         Defaults to None which resolves to the admin members on the Trial SKU.

@@ -691,9 +687,7 @@ def migrate_fabric_trial_capacity(
         )
         return
 
-
-    if target_capacity_region is None:
-        target_capacity_region = dfC_filt["Region"].iloc[0]
+    target_capacity_region = dfC_filt["Region"].iloc[0]
 
     # Use same admins as source capacity
     if isinstance(target_capacity_admin_members, str):
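The removal of target_capacity_region is a small breaking change: 0.8.5 always reuses the trial capacity's region, so callers must drop the argument. A sketch using only the parameters visible in these hunks (the full signature may include additional arguments not shown here):

    import sempy_labs as labs

    labs.migrate_fabric_trial_capacity(
        source_capacity="Trial-00000000",      # placeholder trial capacity name
        target_capacity="fabriccapacity1",     # created with the SKU below if it does not exist
        target_capacity_sku="F64",
        target_capacity_admin_members=None,    # None -> inherit the trial capacity's admins
        # target_capacity_region="West US 2",  # removed in 0.8.5; the region now always follows the trial
    )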
sempy_labs/_data_pipelines.py
CHANGED
@@ -15,6 +15,8 @@ def list_data_pipelines(workspace: Optional[str] = None) -> pd.DataFrame:
     """
     Shows the data pipelines within a workspace.
 
+    This is a wrapper function for the following API: `Items - List Data Pipelines <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/list-data-pipelines>`_.
+
     Parameters
     ----------
     workspace : str, default=None

@@ -57,6 +59,8 @@ def create_data_pipeline(
     """
     Creates a Fabric data pipeline.
 
+    This is a wrapper function for the following API: `Items - Create Data Pipeline <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/create-data-pipeline>`_.
+
     Parameters
     ----------
     name: str

@@ -92,6 +96,8 @@ def delete_data_pipeline(name: str, workspace: Optional[str] = None):
     """
     Deletes a Fabric data pipeline.
 
+    This is a wrapper function for the following API: `Items - Delete Data Pipeline <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/delete-data-pipeline>`_.
+
     Parameters
     ----------
     name: str
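The three data pipeline wrappers form a simple round trip; a sketch with placeholder names (create_data_pipeline may accept further arguments, such as a description, that this diff does not show):

    import sempy_labs as labs

    labs.create_data_pipeline(name="MyPipeline", workspace="MyWorkspace")
    print(labs.list_data_pipelines(workspace="MyWorkspace"))  # includes the new pipeline
    labs.delete_data_pipeline(name="MyPipeline", workspace="MyWorkspace")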
sempy_labs/_dataflows.py
CHANGED
@@ -2,10 +2,12 @@ import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
+    _is_valid_uuid,
 )
-from typing import Optional
+from typing import Optional, Tuple
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
 def list_dataflows(workspace: Optional[str] = None):

@@ -59,6 +61,8 @@ def assign_workspace_to_dataflow_storage(
     """
     Assigns a dataflow storage account to a workspace.
 
+    This is a wrapper function for the following API: `Dataflow Storage Accounts - Groups AssignToDataflowStorage <https://learn.microsoft.com/rest/api/power-bi/dataflow-storage-accounts/groups-assign-to-dataflow-storage>`_.
+
     Parameters
     ----------
     dataflow_storage_account : str

@@ -99,6 +103,8 @@ def list_dataflow_storage_accounts() -> pd.DataFrame:
     """
     Shows the accessible dataflow storage accounts.
 
+    This is a wrapper function for the following API: `Dataflow Storage Accounts - Get Dataflow Storage Accounts <https://learn.microsoft.com/rest/api/power-bi/dataflow-storage-accounts/get-dataflow-storage-accounts>`_.
+
     Returns
     -------
     pandas.DataFrame

@@ -128,3 +134,114 @@ def list_dataflow_storage_accounts() -> pd.DataFrame:
     df["Enabled"] = df["Enabled"].astype(bool)
 
     return df
+
+
+def list_upstream_dataflows(
+    dataflow: str | UUID, workspace: Optional[str] = None
+) -> pd.DataFrame:
+    """
+    Shows a list of upstream dataflows for the specified dataflow.
+
+    This is a wrapper function for the following API: `Dataflows - Get Upstream Dataflows In Group <https://learn.microsoft.com/rest/api/power-bi/dataflows/get-upstream-dataflows-in-group>`_.
+
+    Parameters
+    ----------
+    dataflow : str | UUID
+        Name or UUID of the dataflow.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of upstream dataflows for the specified dataflow.
+    """
+
+    workspace_name = fabric.resolve_workspace_name(workspace)
+    workspace_id = fabric.resolve_workspace_id(workspace)
+    (dataflow_name, dataflow_id) = _resolve_dataflow_name_and_id(
+        dataflow=dataflow, workspace=workspace
+    )
+    client = fabric.PowerBIRestClient()
+
+    df = pd.DataFrame(
+        columns=[
+            "Dataflow Name",
+            "Dataflow Id",
+            "Workspace Name",
+            "Workspace Id",
+            "Upstream Dataflow Name",
+            "Upstream Dataflow Id",
+            "Upstream Workspace Name",
+            "Upstream Workspace Id",
+        ]
+    )
+
+    def collect_upstreams(
+        client, dataflow_id, dataflow_name, workspace_id, workspace_name
+    ):
+        response = client.get(
+            f"/v1.0/myorg/groups/{workspace_id}/dataflows/{dataflow_id}/upstreamDataflows"
+        )
+        if response.status_code != 200:
+            raise FabricHTTPException(response)
+
+        values = response.json().get("value", [])
+        for v in values:
+            tgt_dataflow_id = v.get("targetDataflowId")
+            tgt_workspace_id = v.get("groupId")
+            tgt_workspace_name = fabric.resolve_workspace_name(tgt_workspace_id)
+            (tgt_dataflow_name, _) = _resolve_dataflow_name_and_id(
+                dataflow=tgt_dataflow_id, workspace=tgt_workspace_name
+            )
+
+            df.loc[len(df)] = {
+                "Dataflow Name": dataflow_name,
+                "Dataflow Id": dataflow_id,
+                "Workspace Name": workspace_name,
+                "Workspace Id": workspace_id,
+                "Upstream Dataflow Name": tgt_dataflow_name,
+                "Upstream Dataflow Id": tgt_dataflow_id,
+                "Upstream Workspace Name": tgt_workspace_name,
+                "Upstream Workspace Id": tgt_workspace_id,
+            }
+
+            collect_upstreams(
+                client,
+                tgt_dataflow_id,
+                tgt_dataflow_name,
+                tgt_workspace_id,
+                tgt_workspace_name,
+            )
+
+    collect_upstreams(client, dataflow_id, dataflow_name, workspace_id, workspace_name)
+
+    return df
+
+
+def _resolve_dataflow_name_and_id(
+    dataflow: str | UUID, workspace: Optional[str] = None
+) -> Tuple[str, UUID]:
+
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name(workspace)
+
+    dfD = list_dataflows(workspace=workspace)
+
+    if _is_valid_uuid(dataflow):
+        dfD_filt = dfD[dfD["Dataflow Id"] == dataflow]
+    else:
+        dfD_filt = dfD[dfD["Dataflow Name"] == dataflow]
+
+    if len(dfD_filt) == 0:
+        raise ValueError(
+            f"{icons.red_dot} The '{dataflow}' dataflow does not exist within the '{workspace}' workspace."
+        )
+
+    dataflow_id = dfD_filt["Dataflow Id"].iloc[0]
+    dataflow_name = dfD_filt["Dataflow Name"].iloc[0]
+
+    return dataflow_name, dataflow_id
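The new list_upstream_dataflows resolves the starting dataflow by name or UUID via _resolve_dataflow_name_and_id, then recursively calls the upstream-dataflows endpoint, appending one row per dependency edge. A usage sketch with placeholder names, assuming the function is exported at the package level as the expanded __init__.py suggests:

    import sempy_labs as labs

    # Either the dataflow's display name or its UUID is accepted.
    df = labs.list_upstream_dataflows(dataflow="Sales Dataflow", workspace="MyWorkspace")
    print(df[["Dataflow Name", "Upstream Dataflow Name", "Upstream Workspace Name"]])

Note that collect_upstreams recurses without a visited set, so the traversal relies on the dependency graph being acyclic.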
sempy_labs/_dax.py
CHANGED
@@ -3,9 +3,12 @@ import pandas as pd
 from sempy_labs._helper_functions import (
     resolve_dataset_id,
     resolve_workspace_name_and_id,
+    format_dax_object_name,
 )
+from sempy_labs._model_dependencies import get_model_calc_dependencies
 from typing import Optional
 from sempy._utils._log import log
+from tqdm.auto import tqdm
 
 
 @log

@@ -40,10 +43,7 @@ def evaluate_dax_impersonation(
         A pandas dataframe holding the result of the DAX query.
     """
 
-    # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/execute-queries-in-group
-
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
     dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
 
     request_body = {

@@ -62,3 +62,189 @@ def evaluate_dax_impersonation(
     df = pd.DataFrame(data_rows, columns=column_names)
 
     return df
+
+
+@log
+def get_dax_query_dependencies(
+    dataset: str,
+    dax_string: str,
+    put_in_memory: bool = False,
+    workspace: Optional[str] = None,
+) -> pd.DataFrame:
+    """
+    Obtains the columns on which a DAX query depends, including model dependencies. Shows Vertipaq statistics (i.e. Total Size, Data Size, Dictionary Size, Hierarchy Size) for easy prioritizing.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    dax_string : str
+        The DAX query.
+    put_in_memory : bool, default=False
+        If True, ensures that the dependent columns are put into memory in order to give realistic Vertipaq stats (i.e. Total Size etc.).
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the dependent columns of a given DAX query including model dependencies.
+    """
+
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name(workspace)
+
+    # Escape quotes in dax
+    dax_string = dax_string.replace('"', '""')
+    final_query = f"""
+    EVALUATE
+    VAR source_query = "{dax_string}"
+    VAR all_dependencies = SELECTCOLUMNS(
+        INFO.CALCDEPENDENCY("QUERY", source_query),
+        "Referenced Object Type",[REFERENCED_OBJECT_TYPE],
+        "Referenced Table", [REFERENCED_TABLE],
+        "Referenced Object", [REFERENCED_OBJECT]
+    )
+    RETURN all_dependencies
+    """
+    dep = fabric.evaluate_dax(
+        dataset=dataset, workspace=workspace, dax_string=final_query
+    )
+
+    # Clean up column names and values (remove outside square brackets, underscores in object type)
+    dep.columns = dep.columns.map(lambda x: x[1:-1])
+    dep["Referenced Object Type"] = (
+        dep["Referenced Object Type"].str.replace("_", " ").str.title()
+    )
+    dep
+
+    # Dataframe df will contain the output of all dependencies of the objects used in the query
+    df = dep.copy()
+
+    cd = get_model_calc_dependencies(dataset=dataset, workspace=workspace)
+
+    for _, r in dep.iterrows():
+        ot = r["Referenced Object Type"]
+        object_name = r["Referenced Object"]
+        table_name = r["Referenced Table"]
+        cd_filt = cd[
+            (cd["Object Type"] == ot)
+            & (cd["Object Name"] == object_name)
+            & (cd["Table Name"] == table_name)
+        ]
+
+        # Adds in the dependencies of each object used in the query (i.e. relationship etc.)
+        if len(cd_filt) > 0:
+            subset = cd_filt[
+                ["Referenced Object Type", "Referenced Table", "Referenced Object"]
+            ]
+            df = pd.concat([df, subset], ignore_index=True)
+
+    df.columns = df.columns.map(lambda x: x.replace("Referenced ", ""))
+    # Remove duplicates
+    df = df.drop_duplicates().reset_index(drop=True)
+    # Only show columns and remove the rownumber column
+    df = df[
+        (df["Object Type"].isin(["Column", "Calc Column"]))
+        & (~df["Object"].str.startswith("RowNumber-"))
+    ]
+
+    # Get vertipaq stats, filter to just the objects in the df dataframe
+    df["Full Object"] = format_dax_object_name(df["Table"], df["Object"])
+    dfC = fabric.list_columns(dataset=dataset, workspace=workspace, extended=True)
+    dfC["Full Object"] = format_dax_object_name(dfC["Table Name"], dfC["Column Name"])
+
+    dfC_filtered = dfC[dfC["Full Object"].isin(df["Full Object"].values)][
+        [
+            "Table Name",
+            "Column Name",
+            "Total Size",
+            "Data Size",
+            "Dictionary Size",
+            "Hierarchy Size",
+            "Is Resident",
+            "Full Object",
+        ]
+    ].reset_index(drop=True)
+
+    if put_in_memory:
+        not_in_memory = dfC_filtered[dfC_filtered["Is Resident"] == False]
+
+        if len(not_in_memory) > 0:
+            tbls = not_in_memory["Table Name"].unique()
+
+            # Run basic query to get columns into memory; completed one table at a time (so as not to overload the capacity)
+            for table_name in (bar := tqdm(tbls)):
+                bar.set_description(f"Warming the '{table_name}' table...")
+                css = ", ".join(
+                    not_in_memory[not_in_memory["Table Name"] == table_name][
+                        "Full Object"
+                    ]
+                    .astype(str)
+                    .tolist()
+                )
+                dax = f"""EVALUATE TOPN(1,SUMMARIZECOLUMNS({css}))"""
+                fabric.evaluate_dax(
+                    dataset=dataset, dax_string=dax, workspace=workspace
+                )
+
+            # Get column stats again
+            dfC = fabric.list_columns(
+                dataset=dataset, workspace=workspace, extended=True
+            )
+            dfC["Full Object"] = format_dax_object_name(
+                dfC["Table Name"], dfC["Column Name"]
+            )
+
+            dfC_filtered = dfC[dfC["Full Object"].isin(df["Full Object"].values)][
+                [
+                    "Table Name",
+                    "Column Name",
+                    "Total Size",
+                    "Data Size",
+                    "Dictionary Size",
+                    "Hierarchy Size",
+                    "Is Resident",
+                    "Full Object",
+                ]
+            ].reset_index(drop=True)
+
+    dfC_filtered.drop(["Full Object"], axis=1, inplace=True)
+
+    return dfC_filtered
+
+
+@log
+def get_dax_query_memory_size(
+    dataset: str, dax_string: str, workspace: Optional[str] = None
+) -> int:
+    """
+    Obtains the total size, in bytes, used by all columns that a DAX query depends on.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    dax_string : str
+        The DAX query.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    int
+        The total size, in bytes, used by all columns that the DAX query depends on.
+    """
+
+    if workspace is None:
+        workspace = fabric.resolve_workspace_name(workspace)
+
+    df = get_dax_query_dependencies(
+        dataset=dataset, workspace=workspace, dax_string=dax_string, put_in_memory=True
+    )
+
+    return df["Total Size"].sum()
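get_dax_query_dependencies works by wrapping the query in INFO.CALCDEPENDENCY, folding in model-level dependencies from get_model_calc_dependencies, and joining the surviving columns against the extended Vertipaq stats from fabric.list_columns; get_dax_query_memory_size then simply sums the Total Size column (forcing put_in_memory=True so the sizes are realistic). A sketch with an illustrative model and query, assuming both functions are exported at the package level:

    import sempy_labs as labs

    query = "EVALUATE SUMMARIZECOLUMNS('Date'[Calendar Year])"  # illustrative query
    deps = labs.get_dax_query_dependencies(
        dataset="AdventureWorks",   # placeholder semantic model name
        dax_string=query,
        put_in_memory=True,         # warm non-resident columns before reading sizes
        workspace="MyWorkspace",
    )
    print(deps)
    print(labs.get_dax_query_memory_size(
        dataset="AdventureWorks", dax_string=query, workspace="MyWorkspace"
    ))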
sempy_labs/_deployment_pipelines.py
CHANGED

@@ -11,6 +11,8 @@ def list_deployment_pipelines() -> pd.DataFrame:
     """
     Shows a list of deployment pipelines the user can access.
 
+    This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipelines <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipelines>`_.
+
     Returns
     -------
     pandas.DataFrame

@@ -45,6 +47,8 @@ def list_deployment_pipeline_stages(deployment_pipeline: str) -> pd.DataFrame:
     """
     Shows the specified deployment pipeline stages.
 
+    This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipeline Stages <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipeline-stages>`_.
+
     Parameters
     ----------
     deployment_pipeline : str

@@ -84,13 +88,13 @@ def list_deployment_pipeline_stages(deployment_pipeline: str) -> pd.DataFrame:
     for r in responses:
         for v in r.get("value", []):
             new_data = {
-                "Deployment Pipeline Stage Id": v
-                "Deployment Pipeline Stage Name": v
-                "Description": v
-                "Order": v
-                "Workspace Id": v
-                "Workspace Name": v
-                "Public": v
+                "Deployment Pipeline Stage Id": v.get("id"),
+                "Deployment Pipeline Stage Name": v.get("displayName"),
+                "Description": v.get("description"),
+                "Order": v.get("order"),
+                "Workspace Id": v.get("workspaceId"),
+                "Workspace Name": v.get("workspaceName"),
+                "Public": v.get("isPublic"),
             }
             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 

@@ -106,6 +110,8 @@ def list_deployment_pipeline_stage_items(
     """
     Shows the supported items from the workspace assigned to the specified stage of the specified deployment pipeline.
 
+    This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipeline Stage Items <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipeline-stage-items>`_.
+
     Parameters
     ----------
     deployment_pipeline : str
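A quick sketch of the listing wrappers documented above; the pipeline name is a placeholder, and list_deployment_pipeline_stage_items takes further arguments that are not fully shown in this diff, so only the first two calls are illustrated:

    import sempy_labs as labs

    print(labs.list_deployment_pipelines())
    print(labs.list_deployment_pipeline_stages(deployment_pipeline="Sales Pipeline"))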
sempy_labs/_environments.py
CHANGED
@@ -16,6 +16,8 @@ def create_environment(
     """
     Creates a Fabric environment.
 
+    This is a wrapper function for the following API: `Items - Create Environment <https://learn.microsoft.com/rest/api/fabric/environment/items/create-environment>`_.
+
     Parameters
     ----------
     environment: str

@@ -51,6 +53,8 @@ def list_environments(workspace: Optional[str] = None) -> pd.DataFrame:
     """
     Shows the environments within a workspace.
 
+    This is a wrapper function for the following API: `Items - List Environments <https://learn.microsoft.com/rest/api/fabric/environment/items/list-environments>`_.
+
     Parameters
     ----------
     workspace : str, default=None

@@ -91,6 +95,8 @@ def delete_environment(environment: str, workspace: Optional[str] = None):
     """
     Deletes a Fabric environment.
 
+    This is a wrapper function for the following API: `Items - Delete Environment <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-environment>`_.
+
     Parameters
     ----------
     environment: str
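As with data pipelines, the environment wrappers compose into a round trip; names are placeholders and create_environment may accept extra arguments (e.g. a description) not shown in these hunks:

    import sempy_labs as labs

    labs.create_environment(environment="MyEnvironment", workspace="MyWorkspace")
    print(labs.list_environments(workspace="MyWorkspace"))
    labs.delete_environment(environment="MyEnvironment", workspace="MyWorkspace")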
sempy_labs/_eventhouses.py
CHANGED
@@ -16,6 +16,8 @@ def create_eventhouse(
     """
     Creates a Fabric eventhouse.
 
+    This is a wrapper function for the following API: `Items - Create Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/create-eventhouse>`_.
+
     Parameters
     ----------
     name: str

@@ -51,6 +53,8 @@ def list_eventhouses(workspace: Optional[str] = None) -> pd.DataFrame:
     """
     Shows the eventhouses within a workspace.
 
+    This is a wrapper function for the following API: `Items - List Eventhouses <https://learn.microsoft.com/rest/api/fabric/environment/items/list-eventhouses>`_.
+
     Parameters
     ----------
     workspace : str, default=None

@@ -91,6 +95,8 @@ def delete_eventhouse(name: str, workspace: Optional[str] = None):
     """
     Deletes a Fabric eventhouse.
 
+    This is a wrapper function for the following API: `Items - Delete Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-eventhouse>`_.
+
     Parameters
     ----------
     name: str
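And the equivalent sketch for eventhouses (placeholder names; create_eventhouse may accept extra arguments not shown in these hunks):

    import sempy_labs as labs

    labs.create_eventhouse(name="MyEventhouse", workspace="MyWorkspace")
    print(labs.list_eventhouses(workspace="MyWorkspace"))
    labs.delete_eventhouse(name="MyEventhouse", workspace="MyWorkspace")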