semantic_link_labs-0.12.8-py3-none-any.whl
This diff shows the content of a publicly available package version as released to a supported registry. It is provided for informational purposes only and reflects the package as it appears in its public registry.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/environment/_items.py
@@ -0,0 +1,212 @@
+import pandas as pd
+import sempy_labs._icons as icons
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    resolve_workspace_id,
+    _base_api,
+    _create_dataframe,
+    resolve_item_id,
+    delete_item,
+    create_item,
+)
+from uuid import UUID
+from sempy._utils._log import log
+
+
+@log
+def create_environment(
+    environment: str,
+    description: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Creates a Fabric environment.
+
+    This is a wrapper function for the following API: `Items - Create Environment <https://learn.microsoft.com/rest/api/fabric/environment/items/create-environment>`_.
+
+    Parameters
+    ----------
+    environment: str
+        Name of the environment.
+    description : str, default=None
+        A description of the environment.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    create_item(
+        name=environment,
+        description=description,
+        type="Environment",
+        workspace=workspace,
+    )
+
+
+@log
+def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows the environments within a workspace.
+
+    This is a wrapper function for the following API: `Items - List Environments <https://learn.microsoft.com/rest/api/fabric/environment/items/list-environments>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the environments within a workspace.
+    """
+
+    columns = {
+        "Environment Name": "string",
+        "Environment Id": "string",
+        "Description": "string",
+        "Publish State": "string",
+        "Publish Target Version": "string",
+        "Publish Start Time": "string",
+        "Publish End Time": "string",
+        "Spark Libraries State": "string",
+        "Spark Settings State": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    workspace_id = resolve_workspace_id(workspace)
+
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/environments",
+        uses_pagination=True,
+        client="fabric_sp",
+    )
+
+    rows = []
+    for r in responses:
+        for v in r.get("value", []):
+            pub = v.get("properties", {}).get("publishDetails", {})
+            rows.append(
+                {
+                    "Environment Name": v.get("displayName"),
+                    "Environment Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "Publish State": pub.get("state"),
+                    "Publish Target Version": pub.get("targetVersion"),
+                    "Publish Start Time": pub.get("startTime"),
+                    "Publish End Time": pub.get("endTime"),
+                    "Spark Libraries State": pub.get("componentPublishInfo", {})
+                    .get("sparkLibraries", {})
+                    .get("state"),
+                    "Spark Settings State": pub.get("componentPublishInfo", {})
+                    .get("sparkSettings", {})
+                    .get("state"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
+
+
+@log
+def delete_environment(environment: str | UUID, workspace: Optional[str | UUID] = None):
+    """
+    Deletes a Fabric environment.
+
+    This is a wrapper function for the following API: `Items - Delete Environment <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-environment>`_.
+
+    Parameters
+    ----------
+    environment: str | uuid.UUID
+        Name or ID of the environment.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    delete_item(item=environment, type="Environment", workspace=workspace)
+
+
+@log
+def publish_environment(
+    environment: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Publishes a Fabric environment.
+
+    This is a wrapper function for the following API: `Spark Libraries - Publish Environment <https://learn.microsoft.com/rest/api/fabric/environment/spark-libraries/publish-environment>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    environment: str | uuid.UUID
+        Name or ID of the environment.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    item_id = resolve_item_id(
+        item=environment, type="Environment", workspace=workspace_id
+    )
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/environments/{item_id}/staging/publish",
+        method="post",
+        lro_return_status_code=True,
+        status_codes=None,
+        client="fabric_sp",
+    )
+
+    print(
+        f"{icons.green_dot} The '{environment}' environment within the '{workspace_name}' workspace has been published."
+    )
+
+
+@log
+def cancel_publish_environment(
+    environment: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Trigger an environment publish cancellation.
+
+    This is a wrapper function for the following API: `Items - Cancel Publish Environment <https://learn.microsoft.com/rest/api/fabric/environment/items/cancel-publish-environment>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    environment: str | uuid.UUID
+        Name or ID of the environment.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    item_id = resolve_item_id(
+        item=environment, type="Environment", workspace=workspace_id
+    )
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/environments/{item_id}/staging/cancelPublish",
+        method="post",
+        client="fabric_sp",
+    )
+
+    print(
+        f"{icons.green_dot} The publish of the '{environment}' environment within the '{workspace_name}' workspace has been cancelled."
+    )
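
For orientation, a minimal usage sketch of the environment item functions added above. It is not part of the package diff; it assumes a Microsoft Fabric notebook session with semantic-link-labs 0.12.8 installed, that `sempy_labs.environment` re-exports these functions (its `__init__.py` appears in the file list above), and that the workspace and environment names are hypothetical placeholders.

```python
# A minimal sketch, not from the package: exercises the helpers defined in
# sempy_labs/environment/_items.py above. Assumes a Fabric notebook with
# semantic-link-labs 0.12.8 and that sempy_labs.environment re-exports these
# functions; "MyEnvironment" and "Sales" are hypothetical placeholders.
from sempy_labs.environment import (
    create_environment,
    delete_environment,
    list_environments,
    publish_environment,
)

# Create a new environment in the hypothetical "Sales" workspace.
create_environment(
    environment="MyEnvironment",
    description="Shared Spark environment",
    workspace="Sales",
)

# Publish the staged definition, then verify its publish state.
publish_environment(environment="MyEnvironment", workspace="Sales")
df = list_environments(workspace="Sales")
print(df[["Environment Name", "Publish State"]])

# Clean up.
delete_environment(environment="MyEnvironment", workspace="Sales")
```

Note that `create_environment` and `delete_environment` delegate to the generic `create_item`/`delete_item` helpers, while `publish_environment` posts to the `staging/publish` endpoint, which is why a freshly created environment has nothing published until it is explicitly published.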
sempy_labs/environment/_pubstage.py
@@ -0,0 +1,223 @@
+import pandas as pd
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_id,
+    _base_api,
+    _create_dataframe,
+    resolve_item_id,
+    _update_dataframe_datatypes,
+)
+from uuid import UUID
+from sempy._utils._log import log
+
+
+def _get_spark_compute(
+    environment: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    staging: bool = False,
+) -> pd.DataFrame:
+
+    columns = {
+        "Instance Pool Name": "string",
+        "Instance Pool Type": "string",
+        "Instance Pool Id": "string",
+        "Driver Cores": "int",
+        "Driver Memory": "string",
+        "Executor Cores": "int",
+        "Executor Memory": "string",
+        "Dynamic Executor Allocation Enabled": "bool",
+        "Dynamic Executor Allocation Min Executors": "int",
+        "Dynamic Executor Allocation Max Executors": "int",
+        "Spark Properties": "string",
+        "Runtime Version": "string",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(
+        item=environment, type="Environment", workspace=workspace_id
+    )
+
+    url = f"/v1/workspaces/{workspace_id}/environments/{item_id}/sparkCompute"
+    if staging:
+        url = (
+            f"/v1/workspaces/{workspace_id}/environments/{item_id}/staging/sparkCompute"
+        )
+
+    response = _base_api(
+        request=url,
+        client="fabric_sp",
+    )
+
+    rows = []
+    rows.append(
+        {
+            "Instance Pool Name": response.get("instancePool", {}).get("name"),
+            "Instance Pool Type": response.get("instancePool", {}).get("type"),
+            "Instance Pool Id": response.get("instancePool", {}).get("id"),
+            "Driver Cores": response.get("driverCores"),
+            "Driver Memory": response.get("driverMemory"),
+            "Executor Cores": response.get("executorCores"),
+            "Executor Memory": response.get("executorMemory"),
+            "Dynamic Executor Allocation Enabled": response.get(
+                "dynamicExecutorAllocation", {}
+            ).get("enabled"),
+            "Dynamic Executor Allocation Min Executors": response.get(
+                "dynamicExecutorAllocation", {}
+            ).get("minExecutors"),
+            "Dynamic Executor Allocation Max Executors": response.get(
+                "dynamicExecutorAllocation", {}
+            ).get("maxExecutors"),
+            "Spark Properties": response.get("sparkProperties"),
+            "Runtime Version": response.get("runtimeVersion"),
+        }
+    )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(df, columns)
+
+    return df
+
+
+@log
+def get_published_spark_compute(
+    environment: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Gets the Spark compute of a published Fabric environment.
+
+    This is a wrapper function for the following API: `Published - Get Spark Compute <https://learn.microsoft.com/rest/api/fabric/environment/published/get-spark-compute>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    environment: str | uuid.UUID
+        Name or ID of the environment.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    return _get_spark_compute(environment, workspace, staging=False)
+
+
+@log
+def get_staging_spark_compute(
+    environment: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Gets the Spark compute of a staging Fabric environment.
+
+    This is a wrapper function for the following API: `Staging - Get Spark Compute <https://learn.microsoft.com/rest/api/fabric/environment/staging/get-spark-compute>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    environment: str | uuid.UUID
+        Name or ID of the environment.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    return _get_spark_compute(environment, workspace, staging=True)
+
+
+def _list_libraries(
+    environment: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    staging: bool = False,
+) -> pd.DataFrame:
+
+    columns = {
+        "Library Name": "string",
+        "Library Type": "string",
+        "Library Version": "string",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(
+        item=environment, type="Environment", workspace=workspace_id
+    )
+
+    url = f"/v1/workspaces/{workspace_id}/environments/{item_id}/libraries"
+    if staging:
+        url = f"/v1/workspaces/{workspace_id}/environments/{item_id}/staging/libraries"
+
+    responses = _base_api(
+        request=url,
+        client="fabric_sp",
+        uses_pagination=True,
+    )
+
+    rows = []
+    for r in responses:
+        for lib in r.get("libraries", []):
+            rows.append(
+                {
+                    "Library Name": lib.get("name"),
+                    "Library Type": lib.get("libraryType"),
+                    "Library Version": lib.get("version"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
+
+
+@log
+def list_published_libraries(
+    environment: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Gets the published libraries of a Fabric environment.
+
+    This is a wrapper function for the following API: `Published - List Libraries <https://learn.microsoft.com/rest/api/fabric/environment/published/list-libraries>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    environment: str | uuid.UUID
+        Name or ID of the environment.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    return _list_libraries(environment, workspace, staging=False)
+
+
+@log
+def list_staging_libraries(
+    environment: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Gets the staging libraries of a Fabric environment.
+
+    This is a wrapper function for the following API: `Staging - List Libraries <https://learn.microsoft.com/rest/api/fabric/environment/staging/list-libraries>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    environment: str | uuid.UUID
+        Name or ID of the environment.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    return _list_libraries(environment, workspace, staging=True)
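
A similarly hedged sketch for the published-versus-staging helpers in `_pubstage.py` above, under the same assumptions as the previous sketch (Fabric notebook, re-exports from `sempy_labs.environment`, placeholder names). The function signatures and column names come from the source shown; the comparison workflow itself is illustrative.

```python
# A minimal sketch, not from the package: compares staged Spark compute and
# libraries against what is currently published. Same assumptions as before;
# "MyEnvironment" and "Sales" are hypothetical placeholders.
from sempy_labs.environment import (
    get_published_spark_compute,
    get_staging_spark_compute,
    list_published_libraries,
    list_staging_libraries,
)

env, ws = "MyEnvironment", "Sales"

# Staged compute settings take effect only after the environment is published.
staged = get_staging_spark_compute(environment=env, workspace=ws)
published = get_published_spark_compute(environment=env, workspace=ws)
cols = ["Runtime Version", "Driver Cores", "Executor Cores"]
print(staged[cols])
print(published[cols])

# Libraries that are staged but not yet published appear only in the staging
# list; a simple set difference surfaces them.
staged_libs = list_staging_libraries(environment=env, workspace=ws)
published_libs = list_published_libraries(environment=env, workspace=ws)
pending = set(staged_libs["Library Name"]) - set(published_libs["Library Name"])
print(f"Libraries awaiting publish: {sorted(pending)}")
```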
sempy_labs/eventstream/__init__.py
@@ -0,0 +1,37 @@
+from ._items import (
+    list_eventstreams,
+    create_eventstream,
+    delete_eventstream,
+    get_eventstream_definition,
+)
+from ._topology import (
+    get_eventstream_destination,
+    get_eventstream_destination_connection,
+    get_eventstream_source,
+    get_eventstream_source_connection,
+    get_eventstream_topology,
+    pause_eventstream,
+    pause_eventstream_destination,
+    pause_eventstream_source,
+    resume_eventstream,
+    resume_eventstream_destination,
+    resume_eventstream_source,
+)
+
+__all__ = [
+    "list_eventstreams",
+    "create_eventstream",
+    "delete_eventstream",
+    "get_eventstream_definition",
+    "get_eventstream_destination",
+    "get_eventstream_destination_connection",
+    "get_eventstream_source",
+    "get_eventstream_source_connection",
+    "get_eventstream_topology",
+    "pause_eventstream",
+    "pause_eventstream_destination",
+    "pause_eventstream_source",
+    "resume_eventstream",
+    "resume_eventstream_destination",
+    "resume_eventstream_source",
+]
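
The eventstream `__init__.py` above only names the public surface; the implementations live in `_items.py` and `_topology.py` (see the file list). A hedged sketch of how that surface might be used follows; since this hunk shows no signatures, the keyword arguments (`eventstream=`, `workspace=`) are assumptions based on the parameter conventions used elsewhere in the package, and the names are placeholders.

```python
# A minimal sketch, not from the package: walks the eventstream surface
# exported above. Keyword argument names are assumptions following the
# package's conventions; "MyEventstream" and "Sales" are placeholders.
from sempy_labs.eventstream import (
    get_eventstream_topology,
    list_eventstreams,
    pause_eventstream,
    resume_eventstream,
)

ws = "Sales"  # hypothetical workspace

# Enumerate the eventstreams in the workspace.
print(list_eventstreams(workspace=ws))

# Inspect the topology, then pause and resume a stream around a
# maintenance window.
topology = get_eventstream_topology(eventstream="MyEventstream", workspace=ws)
pause_eventstream(eventstream="MyEventstream", workspace=ws)
resume_eventstream(eventstream="MyEventstream", workspace=ws)
```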