semantic-link-labs 0.12.8 (semantic_link_labs-0.12.8-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/eventstream/_items.py
@@ -0,0 +1,263 @@
+import pandas as pd
+from typing import Optional
+from sempy_labs._helper_functions import (
+    _base_api,
+    delete_item,
+    _create_dataframe,
+    create_item,
+    resolve_workspace_id,
+    resolve_item_id,
+    _decode_b64,
+)
+from uuid import UUID
+import sempy_labs._icons as icons
+from sempy._utils._log import log
+import json
+
+
+@log
+def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows the eventstreams within a workspace.
+
+    This is a wrapper function for the following API: `Items - List Eventstreams <https://learn.microsoft.com/rest/api/fabric/environment/items/list-eventstreams>`_.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the eventstreams within a workspace.
+    """
+
+    columns = {
+        "Eventstream Name": "string",
+        "Eventstream Id": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    workspace_id = resolve_workspace_id(workspace)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventstreams", uses_pagination=True
+    )
+
+    rows = []
+    for r in responses:
+        for v in r.get("value", []):
+            rows.append(
+                {
+                    "Eventstream Name": v.get("displayName"),
+                    "Eventstream Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
+
+
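A minimal usage sketch for list_eventstreams, assuming a Fabric notebook where the eventstream subpackage re-exports this function (its __init__.py appears in the listing above); the workspace name "Sales Analytics" is a hypothetical placeholder.

from sempy_labs import eventstream as es

# Default workspace resolution: the attached lakehouse's workspace, else the notebook's workspace
df = es.list_eventstreams()

# Or target a workspace explicitly by name or ID
df = es.list_eventstreams(workspace="Sales Analytics")
print(df[["Eventstream Name", "Eventstream Id", "Description"]])
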
+@log
+def create_eventstream(
+    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
+):
+    """
+    Creates a Fabric eventstream.
+
+    This is a wrapper function for the following API: `Items - Create Eventstream <https://learn.microsoft.com/rest/api/fabric/environment/items/create-eventstream>`_.
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventstream.
+    description : str, default=None
+        A description of the eventstream.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    create_item(
+        name=name, description=description, type="Eventstream", workspace=workspace
+    )
+
+
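A minimal usage sketch for create_eventstream, under the same assumptions as above; the eventstream name and description are hypothetical placeholders.

from sempy_labs import eventstream as es

# Create an eventstream in the current workspace; the description is optional
es.create_eventstream(name="SalesEvents", description="Ingests point-of-sale events")
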
+@log
+def delete_eventstream(
+    eventstream: str | UUID, workspace: Optional[str | UUID] = None, **kwargs
+):
+    """
+    Deletes a Fabric eventstream.
+
+    This is a wrapper function for the following API: `Items - Delete Eventstream <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-eventstream>`_.
+
+    Parameters
+    ----------
+    eventstream: str | uuid.UUID
+        Name or ID of the eventstream.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    if "name" in kwargs:
+        eventstream = kwargs["name"]
+        print(
+            f"{icons.warning} The 'name' parameter is deprecated. Please use 'eventstream' instead."
+        )
+
+    delete_item(item=eventstream, type="Eventstream", workspace=workspace)
+
+
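A minimal usage sketch for delete_eventstream, under the same assumptions; the item accepts either a display name or a UUID, and the names shown are hypothetical.

from sempy_labs import eventstream as es

# Delete by display name (an item UUID works too)
es.delete_eventstream(eventstream="SalesEvents", workspace="Sales Analytics")
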
+@log
+def get_eventstream_definition(
+    eventstream: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    decode: bool = True,
+    return_dataframe: bool = False,
+) -> dict | pd.DataFrame:
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=eventstream, type="Eventstream", workspace=workspace)
+
+    result = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/eventstreams/{item_id}/getDefinition",
+        method="post",
+        client="fabric_sp",
+        status_codes=None,
+        lro_return_json=True,
+    )
+
+    if decode:
+        definition = {"definition": {"parts": []}}
+
+        for part in result.get("definition", {}).get("parts", []):
+            path = part.get("path")
+            payload = json.loads(_decode_b64(part.get("payload")))
+            definition["definition"]["parts"].append({"path": path, "payload": payload})
+    else:
+        definition = result.copy()
+
+    if return_dataframe:
+        df = pd.DataFrame(definition["definition"]["parts"])
+        df.columns = ["Path", "Payload", "Payload Type"]
+        return df
+    else:
+        return definition
+
+
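A minimal usage sketch for get_eventstream_definition, under the same assumptions. With decode=True (the default) each part's base64 payload is decoded into JSON; with decode=False the raw response is returned, and return_dataframe=True tabulates the raw parts into Path, Payload and Payload Type columns, so the dataframe option pairs naturally with decode=False.

from sempy_labs import eventstream as es

# Decoded JSON payloads, nested under definition -> parts
defn = es.get_eventstream_definition(eventstream="SalesEvents")
for part in defn["definition"]["parts"]:
    print(part["path"])

# Raw, still base64-encoded parts, returned as a dataframe
df_parts = es.get_eventstream_definition(
    eventstream="SalesEvents", decode=False, return_dataframe=True
)
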
+@log
+def list_eventstream_destinations(
+    eventstream: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Lists the destinations of the specified eventstream.
+
+    Parameters
+    ----------
+    eventstream : str | uuid.UUID
+        The name or ID of the eventstream.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the destinations of the eventstream.
+    """
+
+    definition = get_eventstream_definition(
+        eventstream=eventstream, workspace=workspace
+    )
+
+    columns = {
+        "Destination Id": "string",
+        "Destination Name": "string",
+        "Destination Type": "string",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    rows = []
+    for part in definition.get("definition").get("parts"):
+        payload = part.get("payload")
+        if part.get("path") == "eventstream.json":
+            destinations = payload.get("destinations")
+            for d in destinations:
+                rows.append(
+                    {
+                        "Destination Id": d.get("id"),
+                        "Destination Name": d.get("name"),
+                        "Destination Type": d.get("type"),
+                    }
+                )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
+
+
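A minimal usage sketch for list_eventstream_destinations, under the same assumptions; the destinations are parsed out of the eventstream.json part of the item definition.

from sempy_labs import eventstream as es

df_dest = es.list_eventstream_destinations(eventstream="SalesEvents")
print(df_dest["Destination Type"].value_counts())
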
+@log
+def list_eventstream_sources(
+    eventstream: str | UUID, workspace: Optional[str | UUID] = None
+) -> pd.DataFrame:
+    """
+    Lists the sources of the specified eventstream.
+
+    Parameters
+    ----------
+    eventstream : str | uuid.UUID
+        The name or ID of the eventstream.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the sources of the eventstream.
+    """
+
+    definition = get_eventstream_definition(
+        eventstream=eventstream, workspace=workspace
+    )
+
+    columns = {
+        "Source Id": "string",
+        "Source Name": "string",
+        "Source Type": "string",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    rows = []
+    for part in definition.get("definition").get("parts"):
+        payload = part.get("payload")
+        if part.get("path") == "eventstream.json":
+            sources = payload.get("sources")
+            for s in sources:
+                rows.append(
+                    {
+                        "Source Id": s.get("id"),
+                        "Source Name": s.get("name"),
+                        "Source Type": s.get("type"),
+                    }
+                )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
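A minimal usage sketch for list_eventstream_sources, under the same assumptions; sources are extracted from the same eventstream.json part as destinations.

from sempy_labs import eventstream as es

df_src = es.list_eventstream_sources(eventstream="SalesEvents")
print(df_src[["Source Name", "Source Type"]])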