semantic-link-labs 0.9.4__py3-none-any.whl → 0.9.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of semantic-link-labs may be problematic.
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +18 -2
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +49 -43
- sempy_labs/__init__.py +18 -3
- sempy_labs/_capacities.py +22 -127
- sempy_labs/_capacity_migration.py +8 -7
- sempy_labs/_dashboards.py +60 -0
- sempy_labs/_data_pipelines.py +5 -31
- sempy_labs/_environments.py +20 -48
- sempy_labs/_eventhouses.py +22 -52
- sempy_labs/_eventstreams.py +16 -34
- sempy_labs/_gateways.py +4 -4
- sempy_labs/_generate_semantic_model.py +0 -1
- sempy_labs/_git.py +90 -1
- sempy_labs/_graphQL.py +3 -20
- sempy_labs/_helper_functions.py +171 -43
- sempy_labs/_kql_databases.py +19 -34
- sempy_labs/_kql_querysets.py +15 -32
- sempy_labs/_list_functions.py +12 -155
- sempy_labs/_mirrored_databases.py +14 -48
- sempy_labs/_ml_experiments.py +5 -30
- sempy_labs/_ml_models.py +4 -28
- sempy_labs/_model_bpa.py +2 -0
- sempy_labs/_mounted_data_factories.py +119 -0
- sempy_labs/_notebooks.py +16 -26
- sempy_labs/_sql.py +7 -6
- sempy_labs/_utils.py +42 -0
- sempy_labs/_vertipaq.py +17 -2
- sempy_labs/_warehouses.py +5 -17
- sempy_labs/_workloads.py +23 -9
- sempy_labs/_workspaces.py +13 -5
- sempy_labs/admin/__init__.py +21 -1
- sempy_labs/admin/_apps.py +1 -1
- sempy_labs/admin/_artifacts.py +62 -0
- sempy_labs/admin/_basic_functions.py +0 -52
- sempy_labs/admin/_capacities.py +61 -0
- sempy_labs/admin/_reports.py +74 -0
- sempy_labs/admin/_shared.py +4 -2
- sempy_labs/admin/_users.py +133 -0
- sempy_labs/admin/_workspaces.py +148 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +9 -1
- sempy_labs/lakehouse/__init__.py +2 -0
- sempy_labs/lakehouse/_lakehouse.py +6 -7
- sempy_labs/lakehouse/_shortcuts.py +192 -53
- sempy_labs/report/_generate_report.py +9 -17
- sempy_labs/report/_report_bpa.py +12 -19
- sempy_labs/tom/_model.py +34 -16
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
sempy_labs/_data_pipelines.py
CHANGED
@@ -1,13 +1,13 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _decode_b64,
     _base_api,
-    _print_success,
     resolve_item_id,
     _create_dataframe,
+    delete_item,
+    create_item,
 )
 from uuid import UUID

@@ -76,25 +76,8 @@ def create_data_pipeline(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/dataPipelines",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=name,
-        item_type="data pipeline",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=name, description=description, type="DataPipeline", workspace=workspace
     )


@@ -114,16 +97,7 @@ def delete_data_pipeline(name: str | UUID, workspace: Optional[str | UUID] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = resolve_item_id(item=name, type="DataPipeline", workspace=workspace)
-
-    fabric.delete_item(item_id=item_id, workspace=workspace)
-    _print_success(
-        item_name=name,
-        item_type="data pipeline",
-        workspace_name=workspace_name,
-        action="deleted",
-    )
+    delete_item(item=name, type="DataPipeline", workspace=workspace)


 def get_data_pipeline_definition(
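With this change, create_data_pipeline and delete_data_pipeline delegate to the shared create_item and delete_item helpers instead of building their own _base_api requests. A minimal usage sketch of the refactored functions (the pipeline and workspace names below are hypothetical):

import sempy_labs as labs

# Create a data pipeline, then delete it; both calls now route through the
# generic create_item / delete_item helpers in _helper_functions.
labs.create_data_pipeline(
    name="Sales Ingestion",            # hypothetical pipeline name
    description="Loads daily sales",
    workspace="Analytics",             # hypothetical workspace; None resolves to the attached workspace
)
labs.delete_data_pipeline(name="Sales Ingestion", workspace="Analytics")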
sempy_labs/_environments.py
CHANGED
@@ -4,8 +4,10 @@ from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
-    _print_success,
     _create_dataframe,
+    resolve_item_id,
+    delete_item,
+    create_item,
 )
 from uuid import UUID

@@ -32,25 +34,11 @@ def create_environment(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": environment}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/environments",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=environment,
-        item_type="environment",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=environment,
+        description=description,
+        type="Environment",
+        workspace=workspace,
     )


@@ -98,7 +86,7 @@ def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     return df


-def delete_environment(environment: str, workspace: Optional[str | UUID] = None):
+def delete_environment(environment: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric environment.

@@ -106,34 +94,20 @@ def delete_environment(environment: str, workspace: Optional[str | UUID] = None):

     Parameters
     ----------
-    environment: str
-        Name of the environment.
+    environment: str | uuid.UUID
+        Name or ID of the environment.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-
+    delete_item(item=environment, type="Environment", workspace=workspace)

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    environment_id = resolve_environment_id(
-        environment=environment, workspace=workspace_id
-    )
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/environments/{environment_id}",
-        method="delete",
-    )
-    _print_success(
-        item_name=environment,
-        item_type="environment",
-        workspace_name=workspace_name,
-        action="deleted",
-    )

-
-def publish_environment(environment: str, workspace: Optional[str | UUID] = None):
+def publish_environment(
+    environment: str | UUID, workspace: Optional[str | UUID] = None
+):
     """
     Publishes a Fabric environment.

@@ -141,23 +115,21 @@ def publish_environment(environment: str, workspace: Optional[str | UUID] = None):

     Parameters
     ----------
-    environment: str
-        Name of the environment.
+    environment: str | uuid.UUID
+        Name or ID of the environment.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    from sempy_labs._helper_functions import resolve_environment_id
-
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    environment_id = resolve_environment_id(
-        environment=environment, workspace=workspace_id
+    item_id = resolve_item_id(
+        item=environment, type="Environment", workspace=workspace_id
     )

     _base_api(
-        request=f"/v1/workspaces/{workspace_id}/environments/{environment_id}/staging/publish",
+        request=f"/v1/workspaces/{workspace_id}/environments/{item_id}/staging/publish",
         method="post",
         lro_return_status_code=True,
         status_codes=None,
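As of this version, delete_environment and publish_environment accept either the environment name or its UUID, and publishing resolves the item ID through resolve_item_id before calling the staging/publish endpoint. A short sketch of the updated call pattern (environment and workspace names are hypothetical):

import sempy_labs as labs

# The environment can now be referenced by display name or by UUID.
labs.create_environment(environment="DataSciEnv", workspace="Analytics")
labs.publish_environment(environment="DataSciEnv", workspace="Analytics")
labs.delete_environment(environment="DataSciEnv", workspace="Analytics")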
sempy_labs/_eventhouses.py
CHANGED
@@ -1,14 +1,15 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
-    _print_success,
     resolve_item_id,
     _create_dataframe,
     _conv_b64,
     _decode_b64,
+    delete_item,
+    create_item,
+    get_item_definition,
 )
 from uuid import UUID
 import sempy_labs._icons as icons
@@ -39,18 +40,11 @@ def create_eventhouse(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
+    if definition is not None and not isinstance(definition, dict):
+        raise ValueError(f"{icons.red_dot} The definition must be a dictionary.")

-
-    if definition is not None and not isinstance(definition, dict):
-        raise ValueError(f"{icons.red_dot} The definition must be a dictionary.")
-
-    payload["definition"] = {
+    definition_payload = (
+        {
             "parts": [
                 {
                     "path": "EventhouseProperties.json",
@@ -59,19 +53,16 @@ def create_eventhouse(
                 }
             ]
         }
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/eventhouses",
-        method="post",
-        status_codes=[201, 202],
-        payload=payload,
-        lro_return_status_code=True,
+        if definition is not None
+        else None
     )
-    _print_success(
-        item_name=name,
-        item_type="eventhouse",
-        workspace_name=workspace_name,
-        action="created",
+
+    create_item(
+        name=name,
+        type="Eventhouse",
+        workspace=workspace,
+        description=description,
+        definition=definition_payload,
     )


@@ -135,16 +126,7 @@ def delete_eventhouse(name: str, workspace: Optional[str | UUID] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = resolve_item_id(item=name, type="Eventhouse", workspace=workspace)
-
-    fabric.delete_item(item_id=item_id, workspace=workspace)
-    _print_success(
-        item_name=name,
-        item_type="eventhouse",
-        workspace_name=workspace_name,
-        action="deleted",
-    )
+    delete_item(item=name, type="Eventhouse", workspace=workspace)


 def get_eventhouse_definition(
@@ -174,21 +156,9 @@ def get_eventhouse_definition(
         The eventhouse definition in .json format or as a pandas dataframe.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = resolve_item_id(item=eventhouse, type="Eventhouse", workspace=workspace)
-
-    result = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/eventhouses/{item_id}/getDefinition",
-        method="post",
-        status_codes=None,
-        lro_return_json=True,
+    return get_item_definition(
+        item=eventhouse,
+        type="Eventhouse",
+        workspace=workspace,
+        return_dataframe=return_dataframe,
     )
-
-    df = pd.json_normalize(result["definition"]["parts"])
-
-    if return_dataframe:
-        return df
-    else:
-        df_filt = df[df["path"] == "EventhouseProperties.json"]
-        payload = df_filt["payload"].iloc[0]
-        return _decode_b64(payload)
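create_eventhouse now validates that the optional definition is a dictionary, wraps it into an EventhouseProperties.json part, and hands it to create_item; get_eventhouse_definition is reduced to a call to get_item_definition. A hedged sketch of the resulting usage (the names and the properties dictionary are hypothetical):

import sempy_labs as labs

# The definition, if provided, must be a dict; it is encoded into an
# "EventhouseProperties.json" part and passed through create_item.
labs.create_eventhouse(
    name="TelemetryEH",                            # hypothetical eventhouse name
    definition={"minimumConsumptionUnits": 2.25},  # hypothetical properties payload
    workspace="Analytics",                         # hypothetical workspace name
)

# Retrieve the definition; return_dataframe=False returns the decoded JSON payload.
props = labs.get_eventhouse_definition(
    eventhouse="TelemetryEH", workspace="Analytics", return_dataframe=False
)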
sempy_labs/_eventstreams.py
CHANGED
@@ -1,14 +1,14 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
-    _print_success,
-    resolve_item_id,
+    delete_item,
     _create_dataframe,
+    create_item,
 )
 from uuid import UUID
+import sempy_labs._icons as icons


 def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
@@ -74,29 +74,14 @@ def create_eventstream(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/eventstreams",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=name,
-        item_type="eventstream",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=name, description=description, type="Eventstream", workspace=workspace
     )


-def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None):
+def delete_eventstream(
+    eventstream: str | UUID, workspace: Optional[str | UUID] = None, **kwargs
+):
     """
     Deletes a Fabric eventstream.

@@ -104,7 +89,7 @@ def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None):

     Parameters
     ----------
-    name: str | uuid.UUID
+    eventstream: str | uuid.UUID
         Name or ID of the eventstream.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
@@ -112,13 +97,10 @@ def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = resolve_item_id(item=name, type="Eventstream", workspace=workspace)
-
-    fabric.delete_item(item_id=item_id, workspace=workspace)
-    _print_success(
-        item_name=name,
-        item_type="eventstream",
-        workspace_name=workspace_name,
-        action="deleted",
-    )
+    if "name" in kwargs:
+        eventstream = kwargs["name"]
+        print(
+            f"{icons.warning} The 'name' parameter is deprecated. Please use 'eventstream' instead."
+        )
+
+    delete_item(item=eventstream, type="Eventstream", workspace=workspace)
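delete_eventstream renames its first parameter from name to eventstream (name or UUID) while keeping backwards compatibility: a name keyword argument is still accepted through **kwargs, mapped onto eventstream, and a deprecation warning is printed. A brief sketch (the eventstream and workspace names are hypothetical):

import sempy_labs as labs

labs.create_eventstream(name="ClickStream", workspace="Analytics")

# Preferred form in 0.9.5: pass the eventstream by name or UUID.
labs.delete_eventstream(eventstream="ClickStream", workspace="Analytics")

# Legacy form: still accepted, but prints a deprecation warning and maps
# the deprecated 'name' keyword onto 'eventstream'.
# labs.delete_eventstream(name="ClickStream", workspace="Analytics")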
sempy_labs/_gateways.py
CHANGED
@@ -314,7 +314,7 @@ def create_vnet_gateway(
         The name of the subnet.
     """

-    capacity_id = resolve_capacity_id(capacity)
+    capacity_id = resolve_capacity_id(capacity=capacity)
     payload = {
         "type": "VirtualNetwork",
         "displayName": name,
@@ -343,7 +343,7 @@ def create_vnet_gateway(


 def update_on_premises_gateway(
-    gateway: str,
+    gateway: str | UUID,
     allow_cloud_connection_refresh: Optional[bool] = None,
     allow_custom_connectors: Optional[bool] = None,
     load_balancing_setting: Optional[str] = None,
@@ -396,7 +396,7 @@ def update_on_premises_gateway(


 def update_vnet_gateway(
-    gateway: str,
+    gateway: str | UUID,
     capacity: str | UUID,
     inactivity_minutes_before_sleep: Optional[int] = None,
     number_of_member_gateways: Optional[int] = None,
@@ -425,7 +425,7 @@ def update_vnet_gateway(
     payload = {}

     if capacity is not None:
-        capacity_id = resolve_capacity_id(capacity)
+        capacity_id = resolve_capacity_id(capacity=capacity)
         payload["capacityId"] = capacity_id
     if inactivity_minutes_before_sleep is not None:
         payload["inactivityMinutesBeforeSleep"] = inactivity_minutes_before_sleep
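The gateway parameter of update_on_premises_gateway and update_vnet_gateway is widened to accept a UUID as well as a name, and resolve_capacity_id is now called with an explicit keyword argument. A sketch of the updated call, assuming the function is used as shown in the diff (the gateway UUID and capacity name are hypothetical):

import sempy_labs as labs

# 'gateway' may be passed as a display name or as a UUID in 0.9.5.
labs.update_vnet_gateway(
    gateway="00000000-0000-0000-0000-000000000000",  # hypothetical gateway UUID
    capacity="MyCapacity",                           # hypothetical capacity name
    inactivity_minutes_before_sleep=30,
    number_of_member_gateways=2,
)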
sempy_labs/_git.py
CHANGED
@@ -4,6 +4,7 @@ from typing import Optional, List
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
+    _create_dataframe,
 )
 from uuid import UUID

@@ -126,7 +127,7 @@ def connect_workspace_to_github(

 def disconnect_workspace_from_git(workspace: Optional[str | UUID] = None):
     """
-    Disconnects a
+    Disconnects a workspace from a git repository.

     This is a wrapper function for the following API: `Git - Disconnect <https://learn.microsoft.com/rest/api/fabric/core/git/disconnect>`_.

@@ -432,3 +433,91 @@ def update_from_git(
     print(
         f"{icons.green_dot} The '{workspace_name}' workspace has been updated with commits pushed to the connected branch."
     )
+
+
+def get_my_git_credentials(
+    workspace: Optional[str | UUID] = None,
+) -> pd.DataFrame:
+    """
+    Returns the user's Git credentials configuration details.
+
+    This is a wrapper function for the following API: `Git - Get My Git Credentials <https://learn.microsoft.com/rest/api/fabric/core/git/get-my-git-credentials>`_.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the user's Git credentials configuration details.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    columns = {
+        "Source": "string",
+    }
+
+    df = _create_dataframe(columns)
+
+    response = _base_api(request=f"/v1/workspaces/{workspace_id}/git/myGitCredentials")
+
+    r = response.json()
+    new_data = {
+        "Source": r.get("source"),
+        "Connection Id": r.get("connectionId"),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def update_my_git_credentials(
+    source: str,
+    connection_id: Optional[UUID] = None,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Updates the user's Git credentials configuration details.
+
+    This is a wrapper function for the following API: `Git - Update My Git Credentials <https://learn.microsoft.com/rest/api/fabric/core/git/update-my-git-credentials>`_.
+
+    Parameters
+    ----------
+    source : str
+        The Git credentials source. Valid options: 'Automatic', 'ConfiguredConnection', 'None'.
+    connection_id : UUID, default=None
+        The object ID of the connection. Valid only for the 'ConfiguredConnection' source.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    if source == "ConfiguredConnection" and connection_id is None:
+        raise ValueError(
+            f"{icons.red_dot} The 'ConfiguredConnection' source requires a connection_id."
+        )
+
+    payload = {
+        "source": source,
+    }
+
+    if connection_id is not None:
+        payload["connectionId"] = connection_id
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/git/myGitCredentials",
+        method="patch",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} The user's Git credentials have been updated accordingly."
+    )
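The new get_my_git_credentials and update_my_git_credentials wrappers expose the Fabric Git credentials APIs. A hedged usage sketch, assuming both functions are re-exported from the sempy_labs namespace like the other git wrappers (the workspace name and connection ID are hypothetical):

import sempy_labs as labs

# Inspect the current Git credentials configuration for the calling user.
df = labs.get_my_git_credentials(workspace="Analytics")
print(df)

# Switch to a configured connection; 'ConfiguredConnection' requires a connection_id.
labs.update_my_git_credentials(
    source="ConfiguredConnection",
    connection_id="11111111-1111-1111-1111-111111111111",  # hypothetical connection ID
    workspace="Analytics",
)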
sempy_labs/_graphQL.py
CHANGED
@@ -5,7 +5,7 @@ from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
     resolve_workspace_name_and_id,
-    _print_success,
+    create_item,
 )


@@ -73,23 +73,6 @@ def create_graphql_api(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/GraphQLApis",
-        method="post",
-        status_codes=[201, 202],
-        payload=payload,
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=name,
-        item_type="GraphQL API",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=name, description=description, type="GraphQLApi", workspace=workspace
     )
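create_graphql_api likewise drops its inline _base_api call in favour of create_item. A minimal sketch (the API and workspace names are hypothetical):

import sempy_labs as labs

# Creates a GraphQL API item via the shared create_item helper.
labs.create_graphql_api(
    name="OrdersAPI",
    description="GraphQL API over the orders lakehouse",
    workspace="Analytics",
)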