semantic-link-labs 0.12.8 (semantic_link_labs-0.12.8-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/_domains.py
ADDED
@@ -0,0 +1,51 @@
+import pandas as pd
+from sempy_labs._helper_functions import (
+    _base_api,
+    _create_dataframe,
+)
+from sempy._utils._log import log
+
+
+@log
+def list_domains() -> pd.DataFrame:
+    """
+    Returns a list of all the tenant's domains.
+
+    This is a wrapper function for the following API: `Domains - List Domains <https://learn.microsoft.com/rest/api/fabric/core/domains/list-domains>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of the domains.
+    """
+
+    columns = {
+        "Domain Id": "string",
+        "Domain Name": "string",
+        "Description": "string",
+        "Parent Domain Id": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(
+        request="/v1/domains", client="fabric_sp", uses_pagination=True
+    )
+
+    rows = []
+    for r in responses:
+        for v in r.get("value", []):
+            rows.append(
+                {
+                    "Domain Id": v.get("id"),
+                    "Domain Name": v.get("displayName"),
+                    "Description": v.get("description"),
+                    "Parent Domain Id": v.get("parentDomainId"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
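For orientation, a minimal usage sketch of the new module, assuming a Fabric notebook session (or configured Service Principal authentication) with tenant-level permissions; it imports from the private module shown above, though the function may also be re-exported from the top-level sempy_labs namespace via __init__.py:

    from sempy_labs._domains import list_domains

    # Enumerate the tenant's domains as a pandas DataFrame with the columns defined above.
    df_domains = list_domains()
    print(df_domains[["Domain Id", "Domain Name", "Parent Domain Id"]])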
sempy_labs/_eventhouses.py
ADDED
@@ -0,0 +1,182 @@
+import pandas as pd
+from typing import Optional
+from sempy_labs._helper_functions import (
+    _base_api,
+    _create_dataframe,
+    _conv_b64,
+    delete_item,
+    create_item,
+    _get_item_definition,
+    resolve_workspace_id,
+)
+from uuid import UUID
+import sempy_labs._icons as icons
+from sempy._utils._log import log
+
+
+@log
+def create_eventhouse(
+    name: str,
+    definition: Optional[dict],
+    description: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Creates a Fabric eventhouse.
+
+    This is a wrapper function for the following API: `Items - Create Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/create-eventhouse>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventhouse.
+    definition : dict
+        The definition (EventhouseProperties.json) of the eventhouse.
+    description : str, default=None
+        A description of the environment.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    if definition is not None and not isinstance(definition, dict):
+        raise ValueError(f"{icons.red_dot} The definition must be a dictionary.")
+
+    definition_payload = (
+        {
+            "parts": [
+                {
+                    "path": "EventhouseProperties.json",
+                    "payload": _conv_b64(definition),
+                    "payloadType": "InlineBase64",
+                }
+            ]
+        }
+        if definition is not None
+        else None
+    )
+
+    create_item(
+        name=name,
+        type="Eventhouse",
+        workspace=workspace,
+        description=description,
+        definition=definition_payload,
+    )
+
+
+@log
+def list_eventhouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows the eventhouses within a workspace.
+
+    This is a wrapper function for the following API: `Items - List Eventhouses <https://learn.microsoft.com/rest/api/fabric/environment/items/list-eventhouses>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the eventhouses within a workspace.
+    """
+
+    columns = {
+        "Eventhouse Name": "string",
+        "Eventhouse Id": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    workspace_id = resolve_workspace_id(workspace)
+
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventhouses",
+        uses_pagination=True,
+        client="fabric_sp",
+    )
+
+    rows = []
+    for r in responses:
+        for v in r.get("value", []):
+            rows.append(
+                {
+                    "Eventhouse Name": v.get("displayName"),
+                    "Eventhouse Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
+
+
+@log
+def delete_eventhouse(name: str, workspace: Optional[str | UUID] = None):
+    """
+    Deletes a Fabric eventhouse.
+
+    This is a wrapper function for the following API: `Items - Delete Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-eventhouse>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventhouse.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    delete_item(item=name, type="Eventhouse", workspace=workspace)
+
+
+@log
+def get_eventhouse_definition(
+    eventhouse: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    return_dataframe: bool = False,
+) -> dict | pd.DataFrame:
+    """
+    Gets the eventhouse definition.
+
+    This is a wrapper function for the following API: `Items - Get Eventhouse Definition <https://learn.microsoft.com/rest/api/fabric/eventhouse/items/get-eventhouse-definition>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    eventhouse : str
+        Name of the eventhouse.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the eventhouse resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    return_dataframe : bool, default=False
+        If True, returns a dataframe. If False, returns a json dictionary.
+
+    Returns
+    -------
+    dict | pandas.DataFrame
+        The eventhouse definition in .json format or as a pandas dataframe.
+    """
+
+    return _get_item_definition(
+        item=eventhouse,
+        type="Eventhouse",
+        workspace=workspace,
+        return_dataframe=return_dataframe,
+    )
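A hedged usage sketch for the eventhouse wrappers, assuming a Fabric notebook session; the workspace and eventhouse names are illustrative placeholders, and definition is passed explicitly as None because the parameter has no default:

    from sempy_labs._eventhouses import (
        create_eventhouse,
        list_eventhouses,
        get_eventhouse_definition,
        delete_eventhouse,
    )

    workspace = "Sales Analytics"  # placeholder workspace name

    # Create an eventhouse without a custom EventhouseProperties.json definition.
    create_eventhouse(name="TelemetryEH", definition=None, workspace=workspace)

    # Enumerate eventhouses in the workspace, then fetch one definition as a JSON dict.
    print(list_eventhouses(workspace=workspace))
    definition = get_eventhouse_definition("TelemetryEH", workspace=workspace)

    # Clean up the example eventhouse.
    delete_eventhouse(name="TelemetryEH", workspace=workspace)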
sempy_labs/_external_data_shares.py
ADDED
@@ -0,0 +1,230 @@
+from uuid import UUID
+import pandas as pd
+from typing import Optional, List
+import sempy_labs._icons as icons
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    _base_api,
+    _create_dataframe,
+    resolve_item_id,
+    resolve_item_name_and_id,
+    resolve_workspace_id,
+)
+from sempy._utils._log import log
+
+
+@log
+def create_external_data_share(
+    item_name: str,
+    item_type: str,
+    paths: str | List[str],
+    recipient: str,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Creates an external data share for a given path or list of paths in the specified item.
+
+    This is a wrapper function for the following API: `External Data Shares - Create External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/create-external-data-share>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    item_name : str
+        The item name.
+    item_type : str
+        The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
+    paths : str | List[str]
+        The path or list of paths that are to be externally shared. Currently, only a single path is supported.
+    recipient : str
+        The email address of the recipient.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
+
+    if isinstance(paths, str):
+        paths = [paths]
+
+    payload = {"paths": paths, "recipient": {"userPrincipalName": recipient}}
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
+        method="post",
+        status_codes=201,
+        payload=payload,
+        client="fabric_sp",
+    )
+    print(
+        f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{workspace_name}' workspace for the {paths} paths."
+    )
+
+
+@log
+def revoke_external_data_share(
+    external_data_share_id: UUID,
+    item_name: str,
+    item_type: str,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Revokes the specified external data share. Note: This action cannot be undone.
+
+    This is a wrapper function for the following API: `External Data Shares - Revoke External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/revoke-external-data-share`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    external_data_share_id : uuid.UUID
+        The external data share ID.
+    item_name : str
+        The item name.
+    item_type : str
+        The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """

+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke",
+        method="post",
+        client="fabric_sp",
+    )
+    print(
+        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
+    )
+
+
+@log
+def list_external_data_shares_in_item(
+    item_name: str, item_type: str, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Returns a list of the external data shares that exist for the specified item.
+
+    This is a wrapper function for the following API: `External Data Shares - List External Data Shares In Item <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/list-external-data-shares-in-item`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    item_name : str
+        The item name.
+    item_type : str
+        The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of the external data shares that exist for the specified item.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
+
+    columns = {
+        "External Data Share Id": "string",
+        "Paths": "string",
+        "Creator Principal Id": "string",
+        "Creator Principal Type": "string",
+        "Recipient User Principal Name": "string",
+        "Status": "string",
+        "Expiration Time UTC": "string",
+        "Workspace Id": "string",
+        "Item Id": "string",
+        "Item Name": "string",
+        "Item Type": "string",
+        "Invitation URL": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
+        uses_pagination=True,
+        client="fabric_sp",
+    )
+
+    rows = []
+    for r in responses:
+        for i in r.get("value", []):
+            item_id = i.get("itemId")
+            rows.append(
+                {
+                    "External Data Share Id": i.get("id"),
+                    "Paths": [i.get("paths")],
+                    "Creator Principal Id": i.get("creatorPrincipal", {}).get("id"),
+                    "Creator Principal Type": i.get("creatorPrincipal", {}).get("type"),
+                    "Recipient User Principal Name": i.get("recipient", {}).get(
+                        "userPrincipalName"
+                    ),
+                    "Status": i.get("status"),
+                    "Expiration Time UTC": i.get("expriationTimeUtc"),
+                    "Workspace Id": i.get("workspaceId"),
+                    "Item Id": item_id,
+                    "Item Name": item_name,
+                    "Item Type": item_type,
+                    "Invitation URL": i.get("invitationUrl"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
+
+
+@log
+def delete_external_data_share(
+    external_data_share_id: UUID,
+    item: str | UUID,
+    item_type: str,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Deletes the specified external data share.
+
+    This is a wrapper function for the following API: `External Data Shares Provider - Delete External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares-provider/delete-external-data-share`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    external_data_share_id : uuid.UUID
+        The external data share ID.
+    item : str | uuid.UUID
+        The item name or ID.
+    item_type : str
+        The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (item_name, item_id) = resolve_item_name_and_id(
+        item=item, type=item_type, workspace=workspace_id
+    )
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}",
+        method="delete",
+        client="fabric_sp",
+    )
+    print(
+        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
+    )
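A hedged usage sketch for the external data share wrappers; the workspace, lakehouse, path, and recipient values are illustrative placeholders, the tenant setting for external data sharing must be enabled, and revocation cannot be undone:

    from sempy_labs._external_data_shares import (
        create_external_data_share,
        list_external_data_shares_in_item,
        revoke_external_data_share,
    )

    workspace = "Sales Analytics"   # placeholder workspace name
    lakehouse = "SalesLakehouse"    # placeholder Lakehouse item name

    # Share a single OneLake path with an external recipient (only one path is currently supported).
    create_external_data_share(
        item_name=lakehouse,
        item_type="Lakehouse",
        paths="Tables/FactSales",   # placeholder table path
        recipient="partner@example.com",
        workspace=workspace,
    )

    # Inspect the shares on the item, then revoke the first one by its ID.
    shares = list_external_data_shares_in_item(lakehouse, "Lakehouse", workspace=workspace)
    print(shares[["External Data Share Id", "Status", "Recipient User Principal Name"]])

    if not shares.empty:
        share_id = shares["External Data Share Id"].iloc[0]
        revoke_external_data_share(share_id, lakehouse, "Lakehouse", workspace=workspace)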