semantic-link-labs 0.9.3__py3-none-any.whl → 0.9.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +25 -6
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +68 -52
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +45 -4
- sempy_labs/_capacities.py +22 -127
- sempy_labs/_capacity_migration.py +11 -9
- sempy_labs/_dashboards.py +60 -0
- sempy_labs/_data_pipelines.py +5 -31
- sempy_labs/_dax.py +17 -3
- sempy_labs/_delta_analyzer.py +279 -127
- sempy_labs/_environments.py +20 -48
- sempy_labs/_eventhouses.py +69 -30
- sempy_labs/_eventstreams.py +16 -34
- sempy_labs/_gateways.py +4 -4
- sempy_labs/_generate_semantic_model.py +30 -10
- sempy_labs/_git.py +90 -1
- sempy_labs/_graphQL.py +3 -20
- sempy_labs/_helper_functions.py +201 -44
- sempy_labs/_job_scheduler.py +226 -2
- sempy_labs/_kql_databases.py +19 -34
- sempy_labs/_kql_querysets.py +15 -32
- sempy_labs/_list_functions.py +14 -133
- sempy_labs/_mirrored_databases.py +14 -48
- sempy_labs/_ml_experiments.py +5 -30
- sempy_labs/_ml_models.py +4 -28
- sempy_labs/_model_bpa.py +17 -0
- sempy_labs/_model_bpa_rules.py +12 -2
- sempy_labs/_mounted_data_factories.py +119 -0
- sempy_labs/_notebooks.py +16 -26
- sempy_labs/_semantic_models.py +117 -0
- sempy_labs/_sql.py +78 -10
- sempy_labs/_sqldatabase.py +227 -0
- sempy_labs/_utils.py +42 -0
- sempy_labs/_vertipaq.py +17 -2
- sempy_labs/_warehouses.py +5 -17
- sempy_labs/_workloads.py +23 -9
- sempy_labs/_workspaces.py +13 -5
- sempy_labs/admin/__init__.py +70 -9
- sempy_labs/admin/_activities.py +166 -0
- sempy_labs/admin/_apps.py +143 -0
- sempy_labs/admin/_artifacts.py +62 -0
- sempy_labs/admin/_basic_functions.py +32 -704
- sempy_labs/admin/_capacities.py +311 -0
- sempy_labs/admin/_datasets.py +184 -0
- sempy_labs/admin/_domains.py +1 -1
- sempy_labs/admin/_items.py +3 -1
- sempy_labs/admin/_reports.py +239 -0
- sempy_labs/admin/_scanner.py +0 -1
- sempy_labs/admin/_shared.py +76 -0
- sempy_labs/admin/_tenant.py +489 -0
- sempy_labs/admin/_users.py +133 -0
- sempy_labs/admin/_workspaces.py +148 -0
- sempy_labs/directlake/_dl_helper.py +0 -1
- sempy_labs/directlake/_update_directlake_partition_entity.py +14 -0
- sempy_labs/graph/_teams.py +1 -1
- sempy_labs/graph/_users.py +9 -1
- sempy_labs/lakehouse/__init__.py +2 -0
- sempy_labs/lakehouse/_lakehouse.py +6 -7
- sempy_labs/lakehouse/_shortcuts.py +216 -64
- sempy_labs/report/__init__.py +3 -1
- sempy_labs/report/_download_report.py +4 -1
- sempy_labs/report/_export_report.py +272 -0
- sempy_labs/report/_generate_report.py +9 -17
- sempy_labs/report/_report_bpa.py +12 -19
- sempy_labs/report/_report_functions.py +9 -261
- sempy_labs/tom/_model.py +307 -40
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
sempy_labs/_eventhouses.py
CHANGED

@@ -1,18 +1,25 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
-    _print_success,
     resolve_item_id,
     _create_dataframe,
+    _conv_b64,
+    _decode_b64,
+    delete_item,
+    create_item,
+    get_item_definition,
 )
 from uuid import UUID
+import sempy_labs._icons as icons


 def create_eventhouse(
-    name: str,
+    name: str,
+    definition: Optional[dict],
+    description: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     Creates a Fabric eventhouse.
@@ -23,6 +30,8 @@ def create_eventhouse(
     ----------
     name: str
         Name of the eventhouse.
+    definition : dict
+        The definition (EventhouseProperties.json) of the eventhouse.
     description : str, default=None
         A description of the environment.
     workspace : str | uuid.UUID, default=None
@@ -31,25 +40,29 @@ def create_eventhouse(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    [removed lines 34-46; content not rendered in the source diff view]
+    if definition is not None and not isinstance(definition, dict):
+        raise ValueError(f"{icons.red_dot} The definition must be a dictionary.")
+
+    definition_payload = (
+        {
+            "parts": [
+                {
+                    "path": "EventhouseProperties.json",
+                    "payload": _conv_b64(definition),
+                    "payloadType": "InlineBase64",
+                }
+            ]
+        }
+        if definition is not None
+        else None
     )
-    [removed lines 48-52; content not rendered in the source diff view]
+
+    create_item(
+        name=name,
+        type="Eventhouse",
+        workspace=workspace,
+        description=description,
+        definition=definition_payload,
     )


@@ -113,13 +126,39 @@ def delete_eventhouse(name: str, workspace: Optional[str | UUID] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (
-    [removed lines 117-124; content not rendered in the source diff view]
+    delete_item(item=name, type="Eventhouse", workspace=workspace)
+
+
+def get_eventhouse_definition(
+    eventhouse: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    return_dataframe: bool = False,
+) -> dict | pd.DataFrame:
+    """
+    Gets the eventhouse definition.
+
+    This is a wrapper function for the following API: `Items - Get Eventhouse Definition <https://learn.microsoft.com/rest/api/fabric/eventhouse/items/get-eventhouse-definition>`_.
+
+    Parameters
+    ----------
+    eventhouse : str
+        Name of the eventhouse.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID in which the eventhouse resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    return_dataframe : bool, default=False
+        If True, returns a dataframe. If False, returns a json dictionary.
+
+    Returns
+    -------
+    dict | pandas.DataFrame
+        The eventhouse definition in .json format or as a pandas dataframe.
+    """
+
+    return get_item_definition(
+        item=eventhouse,
+        type="Eventhouse",
+        workspace=workspace,
+        return_dataframe=return_dataframe,
     )
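In 0.9.5, eventhouse creation goes through the shared `create_item` helper and can carry an inline `EventhouseProperties.json` definition, and the new `get_eventhouse_definition` wrapper reads it back. A minimal usage sketch (workspace and property names are hypothetical; it assumes these functions are re-exported from the `sempy_labs` package root, as in prior releases):

    import sempy_labs as labs

    # Hypothetical EventhouseProperties.json content; the actual schema is
    # defined by Microsoft Fabric, not by this sketch.
    props = {"minimumConsumptionUnits": 2.25}

    labs.create_eventhouse(
        name="MyEventhouse",
        definition=props,  # base64-encoded into an InlineBase64 definition part
        description="Demo eventhouse",
        workspace="MyWorkspace",
    )

    # Read the definition back, as a dict or (with return_dataframe=True) a DataFrame.
    definition = labs.get_eventhouse_definition(
        eventhouse="MyEventhouse", workspace="MyWorkspace"
    )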
sempy_labs/_eventstreams.py
CHANGED

@@ -1,14 +1,14 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
-    [removed line 7; content not rendered in the source diff view]
-    resolve_item_id,
+    delete_item,
     _create_dataframe,
+    create_item,
 )
 from uuid import UUID
+import sempy_labs._icons as icons


 def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
@@ -74,29 +74,14 @@ def create_eventstream(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    [removed lines 77-78; content truncated in the source diff view]
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/eventstreams",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=name,
-        item_type="eventstream",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=name, description=description, type="Eventstream", workspace=workspace
     )


-def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None):
+def delete_eventstream(
+    eventstream: str | UUID, workspace: Optional[str | UUID] = None, **kwargs
+):
     """
     Deletes a Fabric eventstream.

@@ -104,7 +89,7 @@ def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None)

     Parameters
     ----------
-    [removed line 107; content not rendered in the source diff view]
+    eventstream: str | uuid.UUID
         Name or ID of the eventstream.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
@@ -112,13 +97,10 @@ def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None)
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    [removed lines 115-121; content not rendered in the source diff view]
-        workspace_name=workspace_name,
-        action="deleted",
-    )
+    if "name" in kwargs:
+        eventstream = kwargs["name"]
+        print(
+            f"{icons.warning} The 'name' parameter is deprecated. Please use 'eventstream' instead."
+        )
+
+    delete_item(item=eventstream, type="Eventstream", workspace=workspace)
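The reworked `delete_eventstream` keeps the old call shape working: the legacy `name` keyword is accepted via `**kwargs`, remapped to `eventstream`, and a deprecation notice is printed. A self-contained sketch of the same pattern (illustrative only, not the package's code; it uses `warnings.warn` where the library prints an icon-prefixed message):

    import warnings

    def delete_widget(widget: str = None, workspace: str = None, **kwargs):
        # Accept the pre-rename keyword, warn, then fall through to the new name.
        if "name" in kwargs:
            widget = kwargs.pop("name")
            warnings.warn(
                "The 'name' parameter is deprecated. Please use 'widget' instead.",
                DeprecationWarning,
                stacklevel=2,
            )
        print(f"deleting {widget!r} from workspace {workspace!r}")

    delete_widget(name="sales-stream")  # still works, emits a DeprecationWarning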
sempy_labs/_gateways.py
CHANGED

@@ -314,7 +314,7 @@ def create_vnet_gateway(
         The name of the subnet.
     """

-    capacity_id = resolve_capacity_id(capacity)
+    capacity_id = resolve_capacity_id(capacity=capacity)
     payload = {
         "type": "VirtualNetwork",
         "displayName": name,
@@ -343,7 +343,7 @@ def create_vnet_gateway(


 def update_on_premises_gateway(
-    gateway: str,
+    gateway: str | UUID,
     allow_cloud_connection_refresh: Optional[bool] = None,
     allow_custom_connectors: Optional[bool] = None,
     load_balancing_setting: Optional[str] = None,
@@ -396,7 +396,7 @@ def update_on_premises_gateway(


 def update_vnet_gateway(
-    gateway: str,
+    gateway: str | UUID,
     capacity: str | UUID,
     inactivity_minutes_before_sleep: Optional[int] = None,
     number_of_member_gateways: Optional[int] = None,
@@ -425,7 +425,7 @@ def update_vnet_gateway(
     payload = {}

     if capacity is not None:
-        capacity_id = resolve_capacity_id(capacity)
+        capacity_id = resolve_capacity_id(capacity=capacity)
         payload["capacityId"] = capacity_id
     if inactivity_minutes_before_sleep is not None:
         payload["inactivityMinutesBeforeSleep"] = inactivity_minutes_before_sleep
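These hunks widen the `gateway` parameter to also accept a UUID and switch `resolve_capacity_id` to a keyword call. A usage sketch (the gateway ID and capacity name are hypothetical; it assumes the function is reachable from the `sempy_labs` package root):

    from uuid import UUID
    import sempy_labs as labs

    # A gateway object ID is now an accepted argument type alongside the name.
    gateway_id = UUID("00000000-0000-0000-0000-000000000001")  # hypothetical

    labs.update_vnet_gateway(
        gateway=gateway_id,
        capacity="MyCapacity",
        inactivity_minutes_before_sleep=30,
        number_of_member_gateways=2,
    )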
sempy_labs/_generate_semantic_model.py
CHANGED

@@ -5,12 +5,12 @@ import os
 from typing import Optional, List
 from sempy._utils._log import log
 from sempy_labs._helper_functions import (
-    resolve_lakehouse_name,
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     _conv_b64,
     _decode_b64,
     _base_api,
+    _mount,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
@@ -252,6 +252,7 @@ def deploy_semantic_model(
     target_workspace: Optional[str | UUID] = None,
     refresh_target_dataset: bool = True,
     overwrite: bool = False,
+    perspective: Optional[str] = None,
 ):
     """
     Deploys a semantic model based on an existing semantic model.
@@ -274,6 +275,8 @@ def deploy_semantic_model(
         If set to True, this will initiate a full refresh of the target semantic model in the target workspace.
     overwrite : bool, default=False
         If set to True, overwrites the existing semantic model in the workspace if it exists.
+    perspective : str, default=None
+        Set this to the name of a perspective in the model and it will reduce the deployed model down to the tables/columns/measures/hierarchies within that perspective.
     """

     (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
@@ -307,7 +310,21 @@ def deploy_semantic_model(
             f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace_name}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
         )

-    [removed line 310; content not rendered in the source diff view]
+    if perspective is not None:
+
+        from sempy_labs.tom import connect_semantic_model
+
+        with connect_semantic_model(
+            dataset=source_dataset, workspace=source_workspace, readonly=True
+        ) as tom:
+
+            df_added = tom._reduce_model(perspective_name=perspective)
+            bim = tom.get_bim()
+
+    else:
+        bim = get_semantic_model_bim(
+            dataset=source_dataset, workspace=source_workspace_id
+        )

     # Create the semantic model if the model does not exist
     if dfD_filt.empty:
@@ -325,6 +342,9 @@ def deploy_semantic_model(
     if refresh_target_dataset:
         refresh_semantic_model(dataset=target_dataset, workspace=target_workspace_id)

+    if perspective is not None:
+        return df_added
+

 @log
 def get_semantic_model_bim(
@@ -368,16 +388,16 @@ def get_semantic_model_bim(
             f"{icons.red_dot} In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
         )

-    [removed lines 371-373 and 376; content not rendered in the source diff view; the remaining removed lines below are truncated as shown]
-    if not save_to_file_name.endswith(
-        save_to_file_name = f"{save_to_file_name}{
-    with open(
+    local_path = _mount()
+    save_folder = f"{local_path}/Files"
+    file_ext = ".bim"
+    if not save_to_file_name.endswith(file_ext):
+        save_to_file_name = f"{save_to_file_name}{file_ext}"
+    file_path = os.path.join(save_folder, save_to_file_name)
+    with open(file_path, "w") as json_file:
         json.dump(bimJson, json_file, indent=4)
     print(
-        f"{icons.green_dot} The {
+        f"{icons.green_dot} The {file_ext} file for the '{dataset_name}' semantic model has been saved to the lakehouse attached to the notebook within: 'Files/{save_to_file_name}'.\n\n"
     )

     return bimJson
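The new `perspective` parameter reduces the deployed model to one perspective's contents and, when used, returns a dataframe describing the retained objects. A hedged usage sketch (dataset, workspace, and perspective names are hypothetical; it assumes the function is re-exported from the `sempy_labs` package root):

    import sempy_labs as labs

    # Deploy only the subset of 'SalesModel' covered by the 'Executive' perspective.
    df_added = labs.deploy_semantic_model(
        source_dataset="SalesModel",
        source_workspace="Dev",
        target_dataset="SalesModel_Exec",
        target_workspace="Prod",
        refresh_target_dataset=True,
        overwrite=True,
        perspective="Executive",
    )
    # df_added lists the tables/columns/measures/hierarchies kept in the reduced model.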
sempy_labs/_git.py
CHANGED

@@ -4,6 +4,7 @@ from typing import Optional, List
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
+    _create_dataframe,
 )
 from uuid import UUID

@@ -126,7 +127,7 @@ def connect_workspace_to_github(

 def disconnect_workspace_from_git(workspace: Optional[str | UUID] = None):
     """
-    Disconnects a
+    Disconnects a workspace from a git repository.

     This is a wrapper function for the following API: `Git - Disconnect <https://learn.microsoft.com/rest/api/fabric/core/git/disconnect>`_.

@@ -432,3 +433,91 @@ def update_from_git(
     print(
         f"{icons.green_dot} The '{workspace_name}' workspace has been updated with commits pushed to the connected branch."
     )
+
+
+def get_my_git_credentials(
+    workspace: Optional[str | UUID] = None,
+) -> pd.DataFrame:
+    """
+    Returns the user's Git credentials configuration details.
+
+    This is a wrapper function for the following API: `Git - Get My Git Credentials <https://learn.microsoft.com/rest/api/fabric/core/git/get-my-git-credentials>`_.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the user's Git credentials configuration details.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    columns = {
+        "Source": "string",
+    }
+
+    df = _create_dataframe(columns)
+
+    response = _base_api(request=f"/v1/workspaces/{workspace_id}/git/myGitCredentials")
+
+    r = response.json()
+    new_data = {
+        "Source": r.get("source"),
+        "Connection Id": r.get("connectionId"),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def update_my_git_credentials(
+    source: str,
+    connection_id: Optional[UUID] = None,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Updates the user's Git credentials configuration details.
+
+    This is a wrapper function for the following API: `Git - Update My Git Credentials <https://learn.microsoft.com/rest/api/fabric/core/git/update-my-git-credentials>`_.
+
+    Parameters
+    ----------
+    source : str
+        The Git credentials source. Valid options: 'Automatic', 'ConfiguredConnection', 'None'.
+    connection_id : UUID, default=None
+        The object ID of the connection. Valid only for the 'ConfiguredConnection' source.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    if source == "ConfiguredConnection" and connection_id is None:
+        raise ValueError(
+            f"{icons.red_dot} The 'ConfiguredConnection' source requires a connection_id."
+        )
+
+    payload = {
+        "source": source,
+    }
+
+    if connection_id is not None:
+        payload["connectionId"] = connection_id
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/git/myGitCredentials",
+        method="patch",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} The user's Git credentials have been updated accordingly."
+    )
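A short usage sketch for the two new credential helpers (the connection ID and workspace name are hypothetical; it assumes re-export from the `sempy_labs` package root):

    from uuid import UUID
    import sempy_labs as labs

    # Inspect the current user's Git credentials configuration.
    df = labs.get_my_git_credentials(workspace="MyWorkspace")
    print(df)

    # Point Git integration at a configured connection; per the new code,
    # the 'ConfiguredConnection' source requires a connection_id.
    labs.update_my_git_credentials(
        source="ConfiguredConnection",
        connection_id=UUID("00000000-0000-0000-0000-000000000002"),  # hypothetical
        workspace="MyWorkspace",
    )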
sempy_labs/_graphQL.py
CHANGED

@@ -5,7 +5,7 @@ from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
     resolve_workspace_name_and_id,
-    [removed line 8; content not rendered in the source diff view]
+    create_item,
 )


@@ -73,23 +73,6 @@ def create_graphql_api(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    [removed lines 76-77; content truncated in the source diff view]
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/GraphQLApis",
-        method="post",
-        status_codes=[201, 202],
-        payload=payload,
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=name,
-        item_type="GraphQL API",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=name, description=description, type="GraphQLApi", workspace=workspace
     )
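Across _eventhouses.py, _eventstreams.py, and _graphQL.py this release applies the same refactor: per-module POST-and-print plumbing is replaced by the shared `create_item` helper from sempy_labs._helper_functions. An illustrative sketch of that consolidation pattern (the helper's real body is not shown in this diff; only its call shape, with name/type/workspace/description/definition, is):

    # Illustrative only: mirrors the call shape visible at the new call sites.
    def create_item(name, type, workspace=None, description=None, definition=None):
        payload = {"displayName": name}
        if description:
            payload["description"] = description
        if definition:
            payload["definition"] = definition
        # One generic creation path (returned here for inspection) replaces the
        # per-item-type _base_api(...) blocks that each module used to carry.
        return {"item_type": type, "workspace": workspace, "payload": payload}

    print(create_item(name="MyApi", type="GraphQLApi", workspace="MyWorkspace"))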