semantic-link-labs 0.12.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
|
@@ -0,0 +1,660 @@
|
|
|
1
|
+
import sempy.fabric as fabric
|
|
2
|
+
from typing import Optional, List
|
|
3
|
+
from sempy._utils._log import log
|
|
4
|
+
import sempy_labs._icons as icons
|
|
5
|
+
from sempy_labs._workspaces import assign_workspace_to_capacity
|
|
6
|
+
from sempy_labs.admin import (
|
|
7
|
+
assign_workspaces_to_capacity,
|
|
8
|
+
)
|
|
9
|
+
from sempy_labs.admin._capacities import (
|
|
10
|
+
_list_capacities_meta,
|
|
11
|
+
list_capacities,
|
|
12
|
+
)
|
|
13
|
+
from sempy_labs._helper_functions import (
|
|
14
|
+
resolve_capacity_id,
|
|
15
|
+
convert_to_alphanumeric_lowercase,
|
|
16
|
+
_base_api,
|
|
17
|
+
)
|
|
18
|
+
from sempy_labs._capacities import create_fabric_capacity
|
|
19
|
+
from uuid import UUID
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _migrate_settings(source_capacity: str, target_capacity: str):
    """Migrate every settings category from one capacity to another.

    Invokes each per-category migration helper in turn with the same
    source/target pair: capacity, access, notification, spark, delegated
    tenant, and disaster recovery settings.
    """

    # Run the helpers in the same fixed order; each one migrates a single
    # independent settings category.
    for migrate_category in (
        _migrate_capacity_settings,
        _migrate_access_settings,
        _migrate_notification_settings,
        _migrate_spark_settings,
        _migrate_delegated_tenant_settings,
        _migrate_disaster_recovery_settings,
    ):
        migrate_category(
            source_capacity=source_capacity,
            target_capacity=target_capacity,
        )
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@log
def migrate_workspaces(
    source_capacity: str,
    target_capacity: str,
    workspaces: Optional[str | List[str]] = None,
):
    """
    Migrate workspace(s) from one capacity to another capacity.

    Limitation: source & target capacities must be in the same region.
    If not all of the requested workspaces are successfully migrated to the
    target capacity, the workspaces that were migrated are rolled back to
    the source capacity.

    Parameters
    ----------
    source_capacity : str
        Name of the source Fabric capacity.
    target_capacity : str
        Name of the target/destination Fabric capacity.
    workspaces : str | List[str], default=None
        The name of the workspace(s) to reassign from the source capacity
        to the target capacity. Defaults to None, which reassigns all
        workspaces in the source capacity to the target capacity.
    """

    if isinstance(workspaces, str):
        workspaces = [workspaces]

    dfC = _list_capacities_meta()

    # Resolve and validate the source capacity.
    source_rows = dfC[dfC["Capacity Name"] == source_capacity]
    if source_rows.empty:
        raise ValueError(
            f"{icons.red_dot} Invalid source capacity. The '{source_capacity}' capacity does not exist."
        )
    source_capacity_region = source_rows["Region"].iloc[0]
    source_capacity_id = source_rows["Capacity Id"].iloc[0]

    # Resolve and validate the target capacity.
    target_rows = dfC[dfC["Capacity Name"] == target_capacity]
    if target_rows.empty:
        raise ValueError(
            f"{icons.red_dot} Invalid target capacity. The '{target_capacity}' capacity does not exist."
        )
    target_capacity_region = target_rows["Region"].iloc[0]
    target_capacity_state = target_rows["State"].iloc[0]

    # Workspace reassignment is only supported within a single region,
    # and only onto an active capacity.
    if source_capacity_region != target_capacity_region:
        raise ValueError(
            f"{icons.red_dot} The '{source_capacity}' and '{target_capacity}' are not in the same region."
        )
    if target_capacity_state != "Active":
        raise ValueError(
            f"{icons.red_dot} The '{target_capacity}' target capacity is inactive. The capacity must be active in order for workspaces to be migrated."
        )

    dfW = fabric.list_workspaces(filter=f"capacityId eq '{source_capacity_id.upper()}'")
    # Expected number of migrations: all workspaces on the capacity, or the
    # explicit list the caller provided.
    workspace_count = len(dfW) if workspaces is None else len(workspaces)
    migrated_workspaces = []

    for _, row in dfW.iterrows():
        if workspaces is not None and row["Name"] not in workspaces:
            continue
        assign_workspace_to_capacity(
            capacity=target_capacity, workspace=row["Id"]
        )
        migrated_workspaces.append(row["Name"])

    if len(migrated_workspaces) < workspace_count:
        # Partial migration: undo what was done so all workspaces end up
        # back on the source capacity.
        print(
            f"{icons.warning} Not all workspaces in the '{source_capacity}' capacity were migrated to the '{target_capacity}' capacity."
        )
        print(f"{icons.in_progress} Initiating rollback...")
        for _, row in dfW.iterrows():
            if row["Name"] in migrated_workspaces:
                assign_workspace_to_capacity(
                    capacity=source_capacity, workspace=row["Id"]
                )
        print(
            f"{icons.green_dot} Rollback of the workspaces to the '{source_capacity}' capacity is complete."
        )
    else:
        print(
            f"{icons.green_dot} All workspaces were migrated from the '{source_capacity}' capacity to the '{target_capacity}' capacity succesfully."
        )
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
@log
def migrate_capacities(
    azure_subscription_id: str,
    resource_group: str | dict,
    capacities: Optional[str | List[str]] = None,
    use_existing_rg_for_A_sku: bool = True,
    p_sku_only: bool = True,
    **kwargs,  # NOTE(review): not read in this function; presumably kept for backward compatibility — confirm
):
    """
    This function creates new Fabric capacities for given A or P sku capacities and reassigns their workspaces to the newly created capacity.

    Parameters
    ----------
    azure_subscription_id : str
        The Azure subscription ID.
    resource_group : str | dict
        The name of the Azure resource group.
        For A skus, this parameter will be ignored and the resource group used for the F sku will be the same as the A sku's resource group.
        For P skus, if this parameter is a string, it will use that resource group for all of the newly created F skus. If this parameter is a dictionary, it will use that mapping (capacity name -> resource group) for creating capacities with the mapped resource groups.
    capacities : str | List[str], default=None
        The capacity(ies) to migrate from A/P -> F sku.
        Defaults to None which migrates all accessible A/P sku capacities to F skus.
    use_existing_rg_for_A_sku : bool, default=True
        If True, the F sku inherits the resource group from the A sku (for A sku migrations)
    p_sku_only : bool, default=True
        If set to True, only migrates P skus. If set to False, migrates both P and A skus.
    """

    # Allow a single capacity name to be passed as a plain string.
    if isinstance(capacities, str):
        capacities = [capacities]

    # Skus eligible for migration are the keys of the A/P -> F sku mapping.
    p_sku_list = list(icons.sku_mapping.keys())

    dfC = list_capacities()

    # Narrow to the requested capacities (or all of them when None).
    if capacities is None:
        dfC_filt = dfC.copy()
    else:
        dfC_filt = dfC[dfC["Capacity Name"].isin(capacities)]

    # Keep only P skus, or P and A skus — excluding "PP" (which would
    # otherwise match the "P" prefix test).
    if p_sku_only:
        dfC_filt = dfC_filt[dfC_filt["Sku"].str.startswith("P")]
    else:
        dfC_filt = dfC_filt[
            (dfC_filt["Sku"].str.startswith(("P", "A")))
            & (~dfC_filt["Sku"].str.startswith("PP"))
        ]

    dfC_filt = (
        dfC_filt.copy()
    )  # Something strange is happening here. Without this a key error on Display Name occurs

    if len(dfC_filt) == 0:
        print(f"{icons.info} There are no valid capacities to migrate.")
        return

    for _, r in dfC_filt.iterrows():
        cap_name = r["Capacity Name"]
        region = r["Region"]
        sku_size = r["Sku"]
        admins = r["Admins"]
        # Target F-sku capacity name: normalized source name + fixed suffix.
        tgt_capacity = f"{convert_to_alphanumeric_lowercase(cap_name)}{icons.migrate_capacity_suffix}"

        # Check if target capacity exists
        dfC_tgt = dfC[dfC["Capacity Name"] == tgt_capacity]

        # For A skus the resource group can be inherited (rg=None is passed
        # through to create_fabric_capacity); otherwise resolve it from the
        # string or per-capacity mapping the caller supplied.
        if sku_size[:1] == "A" and use_existing_rg_for_A_sku:
            rg = None
        else:
            if isinstance(resource_group, str):
                rg = resource_group
            elif isinstance(resource_group, dict):
                rg = resource_group.get(cap_name)
            else:
                raise ValueError(f"{icons.red_dot} Invalid 'resource_group' parameter.")

        if sku_size in p_sku_list:
            # Only create the capacity if it does not already exist
            if len(dfC_tgt) > 0:
                print(
                    f"{icons.info} Skipping creating a new capacity for '{cap_name}' as the '{tgt_capacity}' capacity already exists."
                )
            else:
                create_fabric_capacity(
                    capacity_name=tgt_capacity,
                    azure_subscription_id=azure_subscription_id,
                    resource_group=rg,
                    region=region,
                    sku=icons.sku_mapping.get(sku_size),
                    admin_members=admins,
                )
            # Migrate workspaces to new capacity
            assign_workspaces_to_capacity(
                source_capacity=cap_name, target_capacity=tgt_capacity, workspace=None
            )

            # Migrate settings to new capacity
            # _migrate_settings(source_capacity=cap_name, target_capacity=tgt_capacity)
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
@log
def _migrate_capacity_settings(source_capacity: str, target_capacity: str):
    """
    This function migrates a capacity's settings to another capacity.

    Reads the per-workload custom parameters of the source capacity and
    PUTs them onto the target capacity.

    Parameters
    ----------
    source_capacity : str
        Name of the source capacity.
    target_capacity : str
        Name of the target capacity.

    Raises
    ------
    ValueError
        If either capacity name cannot be resolved.
    """

    dfC = list_capacities()
    dfC_filt = dfC[dfC["Capacity Name"] == source_capacity]
    if dfC_filt.empty:
        raise ValueError(
            f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
        )
    source_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
    dfC_filt = dfC[dfC["Capacity Name"] == target_capacity]
    if dfC_filt.empty:
        raise ValueError(
            f"{icons.red_dot} The '{target_capacity}' capacity does not exist."
        )
    target_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()

    # Query string enumerating every workload whose custom parameters are copied.
    workloads_params = "capacityCustomParameters?workloadIds=ADM&workloadIds=CDSA&workloadIds=DMS&workloadIds=RsRdlEngine&workloadIds=ScreenshotEngine&workloadIds=AS&workloadIds=QES&workloadIds=DMR&workloadIds=ESGLake&workloadIds=NLS&workloadIds=lake&workloadIds=TIPS&workloadIds=Kusto&workloadIds=Lakehouse&workloadIds=SparkCore&workloadIds=DI&workloadIds=Notebook&workloadIds=ML&workloadIds=ES&workloadIds=Reflex&workloadIds=Must&workloadIds=dmh&workloadIds=PowerBI&workloadIds=HLS"

    response_get_source = _base_api(
        request=f"capacities/{source_capacity_id}/{workloads_params}"
    )
    source_params = response_get_source.json().get("capacityCustomParameters", {})

    # Build the PUT payload. Workloads with no custom parameters and the
    # "AI" workload are skipped. The original implementation branched on
    # the value's type (None / bool / str / other) but every branch applied
    # an identity conversion and performed the same assignment, so values
    # are simply copied as-is.
    settings_json = {"capacityCustomParameters": {}}
    for workload, workload_parts in source_params.items():
        if workload == "AI":
            continue
        if not workload_parts.get("workloadCustomParameters"):
            continue
        params = {}
        for workload_part in workload_parts.values():
            for workload_item in workload_part:
                params[workload_item["name"]] = workload_item["value"]
        settings_json["capacityCustomParameters"][workload] = {
            "workloadCustomParameters": params
        }

    _base_api(
        request=f"capacities/{target_capacity_id}/{workloads_params}",
        method="put",
        payload=settings_json,
        status_codes=204,
    )

    print(
        f"{icons.green_dot} The capacity settings have been migrated from the '{source_capacity}' capacity to the '{target_capacity}' capacity."
    )
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
@log
def _migrate_disaster_recovery_settings(source_capacity: str, target_capacity: str):
    """
    This function migrates a capacity's disaster recovery settings to another capacity.

    Reads the BCDR (business continuity / disaster recovery) config of the
    source capacity and PUTs it onto the target capacity.

    Parameters
    ----------
    source_capacity : str
        Name of the source capacity.
    target_capacity : str
        Name of the target capacity.

    Raises
    ------
    ValueError
        If either capacity name cannot be resolved.
    """

    dfC = list_capacities()
    dfC_filt = dfC[dfC["Capacity Name"] == source_capacity]
    # Use .empty for consistency with the other _migrate_*_settings helpers.
    if dfC_filt.empty:
        raise ValueError(
            f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
        )
    source_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
    dfC_filt = dfC[dfC["Capacity Name"] == target_capacity]
    if dfC_filt.empty:
        raise ValueError(
            f"{icons.red_dot} The '{target_capacity}' capacity does not exist."
        )
    target_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()

    response_get_source = _base_api(request=f"capacities/{source_capacity_id}/config")

    # Only the "bcdr.config" node of the source config is carried over.
    payload = {"config": response_get_source.json()["bcdr"]["config"]}

    _base_api(
        request=f"capacities/{target_capacity_id}/fabricbcdr",
        payload=payload,
        status_codes=202,
        method="put",
    )
    print(
        f"{icons.green_dot} The disaster recovery settings have been migrated from the '{source_capacity}' capacity to the '{target_capacity}' capacity."
    )
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
@log
def _migrate_access_settings(source_capacity: str, target_capacity: str):
    """
    Copy the access settings of a source capacity onto a target capacity.

    Parameters
    ----------
    source_capacity : str
        Name of the source capacity.
    target_capacity : str
        Name of the target capacity.
    """

    capacity_df = list_capacities()

    # Resolve both capacity names to their (upper-cased) ids.
    src_rows = capacity_df[capacity_df["Capacity Name"] == source_capacity]
    if src_rows.empty:
        raise ValueError(
            f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
        )
    src_id = src_rows["Capacity Id"].iloc[0].upper()

    tgt_rows = capacity_df[capacity_df["Capacity Name"] == target_capacity]
    if tgt_rows.empty:
        raise ValueError(
            f"{icons.red_dot} The '{target_capacity}' capacity does not exist."
        )
    tgt_id = tgt_rows["Capacity Id"].iloc[0].upper()

    # Fetch the source capacity's "access" node and push it to the target.
    source_response = _base_api(request=f"capacities/{src_id}")
    access_payload = source_response.json().get("access", {})

    _base_api(
        request=f"capacities/{tgt_id}/access",
        method="put",
        payload=access_payload,
        status_codes=204,
    )

    print(
        f"{icons.green_dot} The access settings have been migrated from the '{source_capacity}' capacity to the '{target_capacity}' capacity."
    )
|
|
413
|
+
|
|
414
|
+
|
|
415
|
+
@log
def _migrate_notification_settings(source_capacity: str, target_capacity: str):
    """
    Copy the notification settings of a source capacity onto a target capacity.

    Parameters
    ----------
    source_capacity : str
        Name of the source capacity.
    target_capacity : str
        Name of the target capacity.
    """

    capacity_df = list_capacities()

    # Resolve both capacity names to their (upper-cased) ids.
    src_rows = capacity_df[capacity_df["Capacity Name"] == source_capacity]
    if src_rows.empty:
        raise ValueError(
            f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
        )
    src_id = src_rows["Capacity Id"].iloc[0].upper()

    tgt_rows = capacity_df[capacity_df["Capacity Name"] == target_capacity]
    if tgt_rows.empty:
        raise ValueError(
            f"{icons.red_dot} The '{target_capacity}' capacity does not exist."
        )
    tgt_id = tgt_rows["Capacity Id"].iloc[0].upper()

    # Fetch the source capacity's notification settings and push them to
    # the target.
    source_response = _base_api(request=f"capacities/{src_id}")
    notification_payload = source_response.json().get("capacityNotificationSettings", {})

    _base_api(
        request=f"capacities/{tgt_id}/notificationSettings",
        method="put",
        payload=notification_payload,
        status_codes=204,
    )

    print(
        f"{icons.green_dot} The notification settings have been migrated from the '{source_capacity}' capacity to the '{target_capacity}' capacity."
    )
|
|
456
|
+
|
|
457
|
+
|
|
458
|
+
@log
def _migrate_delegated_tenant_settings(source_capacity: str, target_capacity: str):
    """
    This function migrates the delegated tenant settings from a source capacity to a target capacity.

    Parameters
    ----------
    source_capacity : str
        Name of the source capacity.
    target_capacity : str
        Name of the target capacity.

    Raises
    ------
    ValueError
        If either capacity name cannot be resolved.
    """

    dfC = list_capacities()

    dfC_filt = dfC[dfC["Capacity Name"] == source_capacity]
    if dfC_filt.empty:
        raise ValueError(
            f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
        )
    source_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()

    dfC_filt = dfC[dfC["Capacity Name"] == target_capacity]
    if dfC_filt.empty:
        raise ValueError(
            f"{icons.red_dot} The '{target_capacity}' capacity does not exist."
        )
    target_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()

    # The admin endpoint returns the overrides for every capacity in the tenant;
    # only those belonging to the source capacity are migrated below.
    response_get = _base_api("v1/admin/capacities/delegatedTenantSettingOverrides")

    response_json = response_get.json().get("Overrides", [])

    for o in response_json:
        # Guard against an override with a missing/None 'id' — the previous
        # o.get("id").upper() raised AttributeError in that case.
        if (o.get("id") or "").upper() != source_capacity_id:
            continue
        for setting in o.get("tenantSettings", []):
            setting_name = setting.get("settingName")
            # Re-shape the override into the feature-switch payload expected
            # by the selfserve tenant-settings endpoint.
            feature_switch = {
                "switchId": -1,
                "switchName": setting_name,
                "isEnabled": setting.get("enabled", False),
                "isGranular": setting.get("canSpecifySecurityGroups", False),
                "allowedSecurityGroups": [
                    {
                        "id": group.get("graphId"),
                        "name": group.get("name"),
                        "isEmailEnabled": False,
                    }
                    for group in setting.get("enabledSecurityGroups", [])
                ],
                "deniedSecurityGroups": [
                    {
                        "id": group.get("graphId"),
                        "name": group.get("name"),
                        "isEmailEnabled": False,
                    }
                    for group in setting.get("excludedSecurityGroups", [])
                ],
            }

            payload = {"featureSwitches": [feature_switch], "properties": []}

            _base_api(
                request=f"metadata/tenantsettings/selfserve?capacityObjectId={target_capacity_id}",
                method="put",
                payload=payload,
            )

            print(
                f"{icons.green_dot} The delegated tenant settings for the '{setting_name}' feature switch of the '{source_capacity}' capacity have been migrated to the '{target_capacity}' capacity."
            )
|
|
529
|
+
|
|
530
|
+
|
|
531
|
+
@log
def _migrate_spark_settings(source_capacity: str | UUID, target_capacity: str | UUID):
    """
    Migrate a capacity's spark settings to another capacity.

    Requirement: The target capacity must be able to accommodate the spark pools being migrated from the source capacity.

    Parameters
    ----------
    source_capacity : str | uuid.UUID
        Name or ID of the source capacity.
    target_capacity : str | uuid.UUID
        Name or ID of the target capacity.
    """

    source_id = resolve_capacity_id(capacity=source_capacity)
    target_id = resolve_capacity_id(capacity=target_capacity)

    def _capacities_base_url(capacity_id) -> str:
        # Each capacity is served from its own cluster; look up its DNS name
        # and build the base URL for its capacities web API.
        info = _base_api(request=f"metadata/capacityInformation/{capacity_id}")
        server_dns = info.json().get("capacityDns")
        return f"{server_dns}/webapi/capacities"

    # Construct the GET (source) and PUT (target) spark-settings URLs.
    end_url = "workloads/SparkCore/SparkCoreService/automatic/v1/sparksettings"
    get_url = f"{_capacities_base_url(source_id)}/{source_id}/{end_url}"
    put_url = f"{_capacities_base_url(target_id)}/{target_id}/{end_url}/content"

    # Read the source capacity's spark settings and push them onto the target.
    settings_payload = _base_api(request=get_url).json().get("content")
    _base_api(request=put_url, method="put", payload=settings_payload)

    print(
        f"{icons.green_dot} The spark settings have been migrated from the '{source_capacity}' capacity to the '{target_capacity}' capacity."
    )
|
|
573
|
+
|
|
574
|
+
|
|
575
|
+
@log
def migrate_fabric_trial_capacity(
    azure_subscription_id: str,
    resource_group: str,
    source_capacity: str,
    target_capacity: str,
    target_capacity_sku: str = "F64",
    target_capacity_admin_members: Optional[str | List[str]] = None,
    **kwargs,
):
    """
    This function migrates a Fabric trial capacity to a Fabric capacity. If the 'target_capacity' does not exist, it is created with the relevant target capacity parameters (sku, region, admin members).

    Parameters
    ----------
    azure_subscription_id : str
        The Azure subscription ID.
    resource_group : str
        The name of the Azure resource group.
    source_capacity : str
        The name of the Fabric trial capacity.
    target_capacity : str
        The name of the new Fabric capacity (F SKU). If this capacity does not exist, it will be created.
    target_capacity_sku : str, default="F64"
        If the target capacity does not exist, this property sets the SKU size for the target capacity.
    target_capacity_admin_members : str | List[str], default=None
        If the target capacity does not exist, this property sets the admin members for the target capacity.
        Defaults to None which resolves to the admin members on the Trial SKU.

    Raises
    ------
    ValueError
        If the source capacity does not exist, or is not a trial (FT) SKU.
    """

    # The capacity hosting this notebook cannot be migrated out from under it.
    notebook_workspace_id = fabric.get_notebook_workspace_id()
    dfW = fabric.list_workspaces(filter=f"id eq '{notebook_workspace_id}'")
    notebook_capacity_id = dfW["Capacity Id"].iloc[0].lower()

    dfC = list_capacities()
    dfC_filt = dfC[dfC["Capacity Name"] == source_capacity]

    if dfC_filt.empty:
        raise ValueError(
            f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
        )

    # Only trial capacities (FT SKUs) are eligible for this migration path.
    source_capacity_sku = dfC_filt["Sku"].iloc[0]
    if not source_capacity_sku.startswith("FT"):
        raise ValueError(
            f"{icons.red_dot} This function is for migrating Fabric trial capacities to Fabric capacities."
        )

    source_capacity_id = dfC_filt["Capacity Id"].iloc[0].lower()
    if source_capacity_id == notebook_capacity_id:
        print(
            f"{icons.warning} The '{source_capacity}' capacity cannot be both the source capacity as well as the capacity in which the notebook is running."
        )
        return

    # A newly-created target capacity lives in the same region as the trial.
    target_capacity_region = dfC_filt["Region"].iloc[0]

    # Use same admins as source capacity
    if isinstance(target_capacity_admin_members, str):
        target_capacity_admin_members = [target_capacity_admin_members]

    if target_capacity_admin_members is None:
        target_capacity_admin_members = dfC_filt["Admins"].iloc[0]

    dfC_filt = dfC[dfC["Capacity Name"] == target_capacity]
    if dfC_filt.empty:
        create_fabric_capacity(
            capacity_name=target_capacity,
            azure_subscription_id=azure_subscription_id,
            resource_group=resource_group,
            region=target_capacity_region,
            admin_members=target_capacity_admin_members,
            sku=target_capacity_sku,
        )

    # Move every workspace from the trial capacity onto the new capacity.
    assign_workspaces_to_capacity(
        source_capacity=source_capacity,
        target_capacity=target_capacity,
        workspace=None,
    )

    # This migrates all the capacity settings
    # _migrate_settings(
    #     source_capacity=source_capacity,
    #     target_capacity=target_capacity,
    # )
|