semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/METADATA +5 -2
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/RECORD +76 -75
- sempy_labs/__init__.py +14 -2
- sempy_labs/_authentication.py +31 -2
- sempy_labs/_clear_cache.py +39 -37
- sempy_labs/_connections.py +13 -13
- sempy_labs/_data_pipelines.py +20 -20
- sempy_labs/_dataflows.py +27 -28
- sempy_labs/_dax.py +41 -47
- sempy_labs/_environments.py +26 -23
- sempy_labs/_eventhouses.py +16 -15
- sempy_labs/_eventstreams.py +16 -15
- sempy_labs/_external_data_shares.py +18 -20
- sempy_labs/_gateways.py +57 -11
- sempy_labs/_generate_semantic_model.py +100 -71
- sempy_labs/_git.py +134 -67
- sempy_labs/_helper_functions.py +199 -145
- sempy_labs/_job_scheduler.py +92 -0
- sempy_labs/_kql_databases.py +16 -15
- sempy_labs/_kql_querysets.py +16 -15
- sempy_labs/_list_functions.py +281 -120
- sempy_labs/_managed_private_endpoints.py +19 -17
- sempy_labs/_mirrored_databases.py +51 -48
- sempy_labs/_mirrored_warehouses.py +5 -4
- sempy_labs/_ml_experiments.py +16 -15
- sempy_labs/_ml_models.py +15 -14
- sempy_labs/_model_bpa.py +27 -25
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +60 -28
- sempy_labs/_notebooks.py +73 -39
- sempy_labs/_one_lake_integration.py +23 -26
- sempy_labs/_query_scale_out.py +67 -64
- sempy_labs/_refresh_semantic_model.py +47 -42
- sempy_labs/_spark.py +33 -32
- sempy_labs/_sql.py +12 -9
- sempy_labs/_translations.py +10 -7
- sempy_labs/_vertipaq.py +34 -31
- sempy_labs/_warehouses.py +22 -21
- sempy_labs/_workspace_identity.py +11 -10
- sempy_labs/_workspaces.py +40 -33
- sempy_labs/admin/__init__.py +4 -0
- sempy_labs/admin/_basic_functions.py +44 -12
- sempy_labs/admin/_external_data_share.py +3 -3
- sempy_labs/admin/_items.py +4 -4
- sempy_labs/admin/_scanner.py +7 -5
- sempy_labs/directlake/_directlake_schema_compare.py +18 -14
- sempy_labs/directlake/_directlake_schema_sync.py +18 -12
- sempy_labs/directlake/_dl_helper.py +36 -32
- sempy_labs/directlake/_generate_shared_expression.py +10 -9
- sempy_labs/directlake/_get_directlake_lakehouse.py +16 -13
- sempy_labs/directlake/_get_shared_expression.py +4 -3
- sempy_labs/directlake/_guardrails.py +12 -6
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
- sempy_labs/directlake/_update_directlake_partition_entity.py +34 -31
- sempy_labs/directlake/_warm_cache.py +87 -65
- sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -8
- sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -9
- sempy_labs/lakehouse/_lakehouse.py +17 -13
- sempy_labs/lakehouse/_shortcuts.py +42 -23
- sempy_labs/migration/_create_pqt_file.py +16 -11
- sempy_labs/migration/_refresh_calc_tables.py +16 -10
- sempy_labs/report/_download_report.py +9 -8
- sempy_labs/report/_generate_report.py +40 -44
- sempy_labs/report/_paginated.py +9 -9
- sempy_labs/report/_report_bpa.py +13 -9
- sempy_labs/report/_report_functions.py +80 -91
- sempy_labs/report/_report_helper.py +8 -4
- sempy_labs/report/_report_list_functions.py +24 -13
- sempy_labs/report/_report_rebind.py +17 -16
- sempy_labs/report/_reportwrapper.py +41 -33
- sempy_labs/tom/_model.py +117 -38
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.8.9.dist-info → semantic_link_labs-0.8.11.dist-info}/top_level.txt +0 -0
sempy_labs/_environments.py
CHANGED
@@ -8,10 +8,13 @@ from sempy_labs._helper_functions import (
     pagination,
 )
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
 def create_environment(
-    environment: str,
+    environment: str,
+    description: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Creates a Fabric environment.
@@ -24,13 +27,13 @@ def create_environment(
         Name of the environment.
     description : str, default=None
         A description of the environment.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     request_body = {"displayName": environment}
 
@@ -45,11 +48,11 @@ def create_environment(
     lro(client, response, status_codes=[201, 202])
 
     print(
-        f"{icons.green_dot} The '{environment}' environment has been created within the '{
+        f"{icons.green_dot} The '{environment}' environment has been created within the '{workspace_name}' workspace."
     )
 
 
-def list_environments(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the environments within a workspace.
 
@@ -57,8 +60,8 @@ def list_environments(workspace: Optional[str] = None) -> pd.DataFrame:
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -70,7 +73,7 @@ def list_environments(workspace: Optional[str] = None) -> pd.DataFrame:
 
     df = pd.DataFrame(columns=["Environment Name", "Environment Id", "Description"])
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/environments")
@@ -91,7 +94,7 @@ def list_environments(workspace: Optional[str] = None) -> pd.DataFrame:
     return df
 
 
-def delete_environment(environment: str, workspace: Optional[str] = None):
+def delete_environment(environment: str, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric environment.
 
@@ -101,17 +104,17 @@ def delete_environment(environment: str, workspace: Optional[str] = None):
     ----------
     environment: str
         Name of the environment.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
    from sempy_labs._helper_functions import resolve_environment_id
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     environment_id = resolve_environment_id(
-        environment=environment, workspace=
+        environment=environment, workspace=workspace_id
     )
 
     client = fabric.FabricRestClient()
@@ -123,31 +126,31 @@ def delete_environment(environment: str, workspace: Optional[str] = None):
         raise FabricHTTPException(response)
 
     print(
-        f"{icons.green_dot} The '{environment}' environment within the '{
+        f"{icons.green_dot} The '{environment}' environment within the '{workspace_name}' workspace has been deleted."
     )
 
 
-def publish_environment(environment: str, workspace: Optional[str] = None):
+def publish_environment(environment: str, workspace: Optional[str | UUID] = None):
     """
     Publishes a Fabric environment.
 
+    This is a wrapper function for the following API: `Spark Libraries - Publish Environment <https://learn.microsoft.com/rest/api/fabric/environment/spark-libraries/publish-environment>`_.
+
     Parameters
     ----------
     environment: str
         Name of the environment.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    # https://learn.microsoft.com/en-us/rest/api/fabric/environment/spark-libraries/publish-environment?tabs=HTTP
-
     from sempy_labs._helper_functions import resolve_environment_id
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     environment_id = resolve_environment_id(
-        environment=environment, workspace=
+        environment=environment, workspace=workspace_id
    )
 
     client = fabric.FabricRestClient()
@@ -158,5 +161,5 @@ def publish_environment(environment: str, workspace: Optional[str] = None):
     lro(client, response)
 
     print(
-        f"{icons.green_dot} The '{environment}' environment within the '{
+        f"{icons.green_dot} The '{environment}' environment within the '{workspace_name}' workspace has been published."
    )
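The net effect of this change is that every workspace-scoped function in this module now accepts either a workspace name or a workspace UUID. A minimal sketch of the new call shape (assuming `create_environment` is exported at the `sempy_labs` top level, and using a placeholder UUID):

from uuid import UUID
import sempy_labs as labs

# Placeholder workspace ID; a workspace name string still works as before.
workspace = UUID("00000000-0000-0000-0000-000000000000")

labs.create_environment(
    environment="MyEnvironment",
    description="Created from a notebook",  # new optional parameter in this release
    workspace=workspace,
)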
sempy_labs/_eventhouses.py
CHANGED
@@ -8,10 +8,11 @@ from sempy_labs._helper_functions import (
     pagination,
 )
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
 def create_eventhouse(
-    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
 ):
     """
     Creates a Fabric eventhouse.
@@ -24,13 +25,13 @@ def create_eventhouse(
         Name of the eventhouse.
     description : str, default=None
         A description of the environment.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     request_body = {"displayName": name}
 
@@ -45,11 +46,11 @@ def create_eventhouse(
     lro(client, response, status_codes=[201, 202])
 
     print(
-        f"{icons.green_dot} The '{name}' eventhouse has been created within the '{
+        f"{icons.green_dot} The '{name}' eventhouse has been created within the '{workspace_name}' workspace."
     )
 
 
-def list_eventhouses(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_eventhouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the eventhouses within a workspace.
 
@@ -57,8 +58,8 @@ def list_eventhouses(workspace: Optional[str] = None) -> pd.DataFrame:
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -70,7 +71,7 @@ def list_eventhouses(workspace: Optional[str] = None) -> pd.DataFrame:
 
     df = pd.DataFrame(columns=["Eventhouse Name", "Eventhouse Id", "Description"])
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/eventhouses")
@@ -91,7 +92,7 @@ def list_eventhouses(workspace: Optional[str] = None) -> pd.DataFrame:
     return df
 
 
-def delete_eventhouse(name: str, workspace: Optional[str] = None):
+def delete_eventhouse(name: str, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric eventhouse.
 
@@ -101,16 +102,16 @@ def delete_eventhouse(name: str, workspace: Optional[str] = None):
     ----------
     name: str
         Name of the eventhouse.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     item_id = fabric.resolve_item_id(
-        item_name=name, type="Eventhouse", workspace=
+        item_name=name, type="Eventhouse", workspace=workspace_id
     )
 
     client = fabric.FabricRestClient()
@@ -120,5 +121,5 @@ def delete_eventhouse(name: str, workspace: Optional[str] = None):
         raise FabricHTTPException(response)
 
     print(
-        f"{icons.green_dot} The '{name}' eventhouse within the '{
+        f"{icons.green_dot} The '{name}' eventhouse within the '{workspace_name}' workspace has been deleted."
    )
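The eventhouse functions follow the same pattern. A hedged sketch of listing eventhouses by workspace ID (placeholder UUID, and assuming the function is exported at the package top level):

from uuid import UUID
import sempy_labs as labs

# Returns a DataFrame with the columns shown in the diff above.
df = labs.list_eventhouses(workspace=UUID("00000000-0000-0000-0000-000000000000"))
print(df[["Eventhouse Name", "Eventhouse Id", "Description"]])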
sempy_labs/_eventstreams.py
CHANGED
@@ -8,9 +8,10 @@ from sempy_labs._helper_functions import (
     pagination,
 )
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
-def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the eventstreams within a workspace.
 
@@ -18,8 +19,8 @@ def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -31,7 +32,7 @@ def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
 
     df = pd.DataFrame(columns=["Eventstream Name", "Eventstream Id", "Description"])
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams")
@@ -53,7 +54,7 @@ def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
 
 
 def create_eventstream(
-    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
 ):
     """
     Creates a Fabric eventstream.
@@ -66,13 +67,13 @@ def create_eventstream(
         Name of the eventstream.
     description : str, default=None
         A description of the environment.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     request_body = {"displayName": name}
 
@@ -87,11 +88,11 @@ def create_eventstream(
     lro(client, response, status_codes=[201, 202])
 
     print(
-        f"{icons.green_dot} The '{name}' eventstream has been created within the '{
+        f"{icons.green_dot} The '{name}' eventstream has been created within the '{workspace_name}' workspace."
     )
 
 
-def delete_eventstream(name: str, workspace: Optional[str] = None):
+def delete_eventstream(name: str, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric eventstream.
 
@@ -101,16 +102,16 @@ def delete_eventstream(name: str, workspace: Optional[str] = None):
     ----------
     name: str
         Name of the eventstream.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     item_id = fabric.resolve_item_id(
-        item_name=name, type="Eventstream", workspace=
+        item_name=name, type="Eventstream", workspace=workspace_id
     )
 
     client = fabric.FabricRestClient()
@@ -120,5 +121,5 @@ def delete_eventstream(name: str, workspace: Optional[str] = None):
         raise FabricHTTPException(response)
 
     print(
-        f"{icons.green_dot} The '{name}' eventstream within the '{
+        f"{icons.green_dot} The '{name}' eventstream within the '{workspace_name}' workspace has been deleted."
    )
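As with the other item types, a name or a UUID can now be passed wherever `workspace` appears; for example (hypothetical names, assuming top-level exports):

import sempy_labs as labs

labs.create_eventstream(name="SalesEvents", workspace="My Workspace")
labs.delete_eventstream(name="SalesEvents", workspace="My Workspace")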
sempy_labs/_external_data_shares.py
CHANGED

@@ -15,7 +15,7 @@ def create_external_data_share(
     item_type: str,
     paths: str | List[str],
     recipient: str,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Creates an external data share for a given path or list of paths in the specified item.
@@ -32,17 +32,15 @@ def create_external_data_share(
         The path or list of paths that are to be externally shared. Currently, only a single path is supported.
     recipient : str
         The email address of the recipient.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_id = fabric.resolve_item_id(
-        item_name=item_name, type=item_type, workspace=
+        item_name=item_name, type=item_type, workspace=workspace_id
     )
 
     if isinstance(paths, str):
@@ -60,7 +58,7 @@ def create_external_data_share(
         raise FabricHTTPException(response)
 
     print(
-        f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{
+        f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{workspace_name}' workspace for the {paths} paths."
     )
 
 
@@ -68,7 +66,7 @@ def revoke_external_data_share(
     external_data_share_id: UUID,
     item_name: str,
     item_type: str,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Revokes the specified external data share. Note: This action cannot be undone.
@@ -77,21 +75,21 @@ def revoke_external_data_share(
 
     Parameters
     ----------
-    external_data_share_id : UUID
+    external_data_share_id : uuid.UUID
         The external data share ID.
     item_name : str
         The item name.
     item_type : str
         The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_id = fabric.resolve_item_id(
-        item_name=item_name, type=item_type, workspace=
+        item_name=item_name, type=item_type, workspace=workspace_id
     )
 
     client = fabric.FabricRestClient()
@@ -103,12 +101,12 @@ def revoke_external_data_share(
         raise FabricHTTPException(response)
 
     print(
-        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{
+        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace_name}' workspace has been revoked."
     )
 
 
 def list_external_data_shares_in_item(
-    item_name: str, item_type: str, workspace: Optional[str] = None
+    item_name: str, item_type: str, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Returns a list of the external data shares that exist for the specified item.
@@ -121,8 +119,8 @@ def list_external_data_shares_in_item(
         The item name.
     item_type : str
         The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -132,9 +130,9 @@ def list_external_data_shares_in_item(
         A pandas dataframe showing a list of the external data shares that exist for the specified item.
     """
 
-    (
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_id = fabric.resolve_item_id(
-        item_name=item_name, type=item_type, workspace=
+        item_name=item_name, type=item_type, workspace=workspace_id
     )
 
     client = fabric.FabricRestClient()
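A sketch of creating an external data share under the widened signature (hypothetical item and recipient; per the docstring, only a single path is currently supported):

import sempy_labs as labs

labs.create_external_data_share(
    item_name="SalesLakehouse",
    item_type="Lakehouse",
    paths="Tables/sales",
    recipient="partner@contoso.com",
    workspace="My Workspace",  # name or UUID as of this release
)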
sempy_labs/_gateways.py
CHANGED
@@ -6,6 +6,8 @@ from sempy_labs._helper_functions import (
     pagination,
     _is_valid_uuid,
     resolve_capacity_id,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from uuid import UUID
 import sempy_labs._icons as icons
@@ -93,7 +95,7 @@ def delete_gateway(gateway: str | UUID):
 
     Parameters
     ----------
-    gateway : str | UUID
+    gateway : str | uuid.UUID
         The gateway name or ID.
     """
 
@@ -115,7 +117,7 @@ def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:
 
     Parameters
     ----------
-    gateway : str | UUID
+    gateway : str | uuid.UUID
         The gateway name or ID.
 
     Returns
@@ -157,9 +159,9 @@ def delete_gateway_role_assignment(gateway: str | UUID, role_assignement_id: UUI
 
     Parameters
     ----------
-    gateway : str | UUID
+    gateway : str | uuid.UUID
         The gateway name or ID.
-    role_assignement_id : UUID
+    role_assignement_id : uuid.UUID
         The role assignment ID.
     """
 
@@ -202,9 +204,9 @@ def delete_gateway_member(gateway: str | UUID, gateway_member: str | UUID):
 
     Parameters
     ----------
-    gateway : str | UUID
+    gateway : str | uuid.UUID
         The gateway name or ID.
-    gateway_member : str | UUID
+    gateway_member : str | uuid.UUID
         The gateway member name or ID.
     """
 
@@ -232,7 +234,7 @@ def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:
 
     Parameters
     ----------
-    gateway : str | UUID
+    gateway : str | uuid.UUID
         The gateway name or ID.
 
     Returns
@@ -296,7 +298,7 @@ def create_vnet_gateway(
     ----------
     name : str
         The gateway name.
-    capacity : str | UUID
+    capacity : str | uuid.UUID
         The capacity name or Id.
     inactivity_minutes_before_sleep : int
         The minutes of inactivity before the virtual network gateway goes into auto-sleep. Must be one of the following values: 30, 60, 90, 120, 150, 240, 360, 480, 720, 1440.
@@ -351,7 +353,7 @@ def update_on_premises_gateway(
 
     Parameters
     ----------
-    gateway : str | UUID
+    gateway : str | uuid.UUID
         The gateway name or ID.
     allow_cloud_connection_refresh : bool, default=None
         Whether to allow cloud connections to refresh through this on-premises gateway. True - Allow, False - Do not allow.
@@ -401,9 +403,9 @@ def update_vnet_gateway(
 
     Parameters
     ----------
-    gateway : str | UUID
+    gateway : str | uuid.UUID
         The gateway name or ID.
-    capacity: str | UUID
+    capacity: str | uuid.UUID
         The capacity name or ID.
     inactivity_minutes_before_sleep : int, default=None
         The minutes of inactivity before the virtual network gateway goes into auto-sleep. Must be one of the following values: 30, 60, 90, 120, 150, 240, 360, 480, 720, 1440.
@@ -437,3 +439,47 @@ def update_vnet_gateway(
         raise FabricHTTPException(response)
 
     print(f"{icons.green_dot} The '{gateway}' has been updated accordingly.")
+
+
+def bind_semantic_model_to_gateway(
+    dataset: str | UUID, gateway: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Binds the specified dataset from the specified workspace to the specified gateway.
+
+    This is a wrapper function for the following API: `Datasets - Bind To Gateway In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/bind-to-gateway-in-group>`_.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        The name or ID of the semantic model.
+    gateway : str | uuid.UUID
+        The name or ID of the gateway.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+        dataset, workspace=workspace_id
+    )
+
+    gateway_id = _resolve_gateway_id(gateway)
+    payload = {
+        "gatewayObjectId": gateway_id,
+    }
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/Default.BindToGateway",
+        json=payload,
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has been binded to the '{gateway_id}' gateway."
+    )