semantic-link-labs 0.9.9__py3-none-any.whl → 0.9.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/METADATA +30 -22
- {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/RECORD +47 -40
- {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +28 -1
- sempy_labs/_clear_cache.py +12 -0
- sempy_labs/_dax.py +8 -2
- sempy_labs/_delta_analyzer.py +17 -26
- sempy_labs/_environments.py +19 -1
- sempy_labs/_generate_semantic_model.py +7 -8
- sempy_labs/_helper_functions.py +351 -151
- sempy_labs/_kql_databases.py +18 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_list_functions.py +18 -36
- sempy_labs/_model_bpa_rules.py +13 -3
- sempy_labs/_notebooks.py +44 -11
- sempy_labs/_semantic_models.py +93 -1
- sempy_labs/_sql.py +3 -2
- sempy_labs/_tags.py +194 -0
- sempy_labs/_variable_libraries.py +89 -0
- sempy_labs/_vertipaq.py +6 -6
- sempy_labs/_vpax.py +386 -0
- sempy_labs/_warehouses.py +3 -3
- sempy_labs/admin/__init__.py +14 -0
- sempy_labs/admin/_artifacts.py +3 -3
- sempy_labs/admin/_capacities.py +161 -1
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_items.py +16 -11
- sempy_labs/admin/_tags.py +126 -0
- sempy_labs/admin/_tenant.py +5 -5
- sempy_labs/directlake/_generate_shared_expression.py +29 -26
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +55 -5
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/lakehouse/__init__.py +16 -0
- sempy_labs/lakehouse/_blobs.py +115 -63
- sempy_labs/lakehouse/_get_lakehouse_columns.py +41 -18
- sempy_labs/lakehouse/_get_lakehouse_tables.py +62 -47
- sempy_labs/lakehouse/_helper.py +211 -0
- sempy_labs/lakehouse/_lakehouse.py +45 -36
- sempy_labs/lakehouse/_livy_sessions.py +137 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +7 -12
- sempy_labs/migration/_refresh_calc_tables.py +7 -6
- sempy_labs/report/_download_report.py +1 -1
- sempy_labs/report/_generate_report.py +5 -1
- sempy_labs/report/_reportwrapper.py +31 -18
- sempy_labs/tom/_model.py +104 -35
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +0 -9
- sempy_labs/report/_bpareporttemplate/.platform +0 -11
- {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/top_level.txt +0 -0
sempy_labs/_warehouses.py
CHANGED

@@ -53,11 +53,11 @@ def create_warehouse(
         "defaultCollation"
     ] = "Latin1_General_100_CI_AS_KS_WS_SC_UTF8"
 
-    _base_api(
+    result = _base_api(
         request=f"/v1/workspaces/{workspace_id}/warehouses",
         payload=payload,
         method="post",
-
+        lro_return_json=True,
        status_codes=[201, 202],
     )
 
@@ -65,7 +65,7 @@ def create_warehouse(
         f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace_name}' workspace."
     )
 
-    return
+    return result.get("id")
 
 
 def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
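With this change, create_warehouse returns the id of the created warehouse (read from the LRO response JSON) instead of None. A minimal usage sketch; the warehouse and workspace names are illustrative:

    import sempy_labs as labs

    # now returns the new warehouse's id rather than None
    warehouse_id = labs.create_warehouse(warehouse="SalesDW", workspace="My Workspace")
    print(warehouse_id)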
sempy_labs/admin/__init__.py
CHANGED

@@ -38,6 +38,7 @@ from sempy_labs.admin._capacities import (
     get_capacity_assignment_status,
     get_capacity_state,
     list_capacity_users,
+    get_refreshables,
 )
 from sempy_labs.admin._tenant import (
     list_tenant_settings,
@@ -80,6 +81,14 @@ from sempy_labs.admin._external_data_share import (
 from sempy_labs.admin._git import (
     list_git_connections,
 )
+from sempy_labs.admin._dataflows import (
+    export_dataflow,
+)
+from sempy_labs.admin._tags import (
+    list_tags,
+    create_tags,
+    delete_tag,
+)
 
 __all__ = [
     "list_items",
@@ -133,4 +142,9 @@ __all__ = [
     "list_capacity_users",
     "list_user_subscriptions",
     "list_report_subscriptions",
+    "get_refreshables",
+    "export_dataflow",
+    "list_tags",
+    "create_tags",
+    "delete_tag",
 ]
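The new functions are re-exported through sempy_labs.admin, so they are callable straight off the module. For example:

    from sempy_labs import admin

    admin.get_refreshables(top=10)
    admin.list_tags()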
sempy_labs/admin/_artifacts.py
CHANGED

@@ -31,7 +31,7 @@ def list_unused_artifacts(workspace: Optional[str | UUID] = None) -> pd.DataFram
         "Artifact Name": "string",
         "Artifact Id": "string",
         "Artifact Type": "string",
-        "Artifact Size in MB": "
+        "Artifact Size in MB": "string",
         "Created Date Time": "datetime",
         "Last Accessed Date Time": "datetime",
     }
@@ -47,8 +47,8 @@ def list_unused_artifacts(workspace: Optional[str | UUID] = None) -> pd.DataFram
     for r in responses:
         for i in r.get("unusedArtifactEntities", []):
             new_data = {
-                "Artifact Name": i.get("
-                "Artifact Id": i.get("
+                "Artifact Name": i.get("displayName"),
+                "Artifact Id": i.get("artifactId"),
                 "Artifact Type": i.get("artifactType"),
                 "Artifact Size in MB": i.get("artifactSizeInMB"),
                 "Created Date Time": i.get("createdDateTime"),
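The fix above makes list_unused_artifacts populate the Artifact Name and Artifact Id columns from the displayName and artifactId response fields, and types Artifact Size in MB as a string. A short sketch; the workspace name is illustrative:

    from sempy_labs import admin

    df = admin.list_unused_artifacts(workspace="My Workspace")
    df[["Artifact Name", "Artifact Id", "Artifact Size in MB"]].head()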
sempy_labs/admin/_capacities.py
CHANGED

@@ -5,6 +5,7 @@ from typing import Optional, Tuple
 from sempy._utils._log import log
 from sempy_labs._helper_functions import (
     _base_api,
+    _build_url,
     _create_dataframe,
     _update_dataframe_datatypes,
     _is_valid_uuid,
@@ -57,6 +58,24 @@ def _resolve_capacity_name_and_id(
     return capacity_name, capacity_id
 
 
+def _resolve_capacity_id(
+    capacity: str | UUID,
+) -> UUID:
+
+    if _is_valid_uuid(capacity):
+        capacity_id = capacity
+    else:
+        dfC = list_capacities(capacity=capacity)
+        if dfC.empty:
+            raise ValueError(
+                f"{icons.red_dot} The '{capacity}' capacity was not found."
+            )
+
+        capacity_id = dfC["Capacity Id"].iloc[0]
+
+    return capacity_id
+
+
 def _list_capacities_meta() -> pd.DataFrame:
     """
     Shows the a list of capacities and their properties. This function is the admin version.
@@ -221,7 +240,7 @@ def list_capacities(
         "Sku": "string",
         "Region": "string",
         "State": "string",
-        "Admins": "
+        "Admins": "list",
     }
     df = _create_dataframe(columns=columns)
 
@@ -309,3 +328,144 @@ def list_capacity_users(capacity: str | UUID) -> pd.DataFrame:
     _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
+
+
+@log
+def get_refreshables(
+    top: Optional[int] = None,
+    expand: Optional[str] = None,
+    filter: Optional[str] = None,
+    skip: Optional[int] = None,
+    capacity: Optional[str | UUID] = None,
+) -> pd.DataFrame | dict:
+    """
+    Returns a list of refreshables for the organization within a capacity.
+
+    Power BI retains a seven-day refresh history for each dataset, up to a maximum of sixty refreshes.
+
+    This is a wrapper function for the following API: `Admin - Get Refreshables <https://learn.microsoft.com/rest/api/power-bi/admin/get-refreshables>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    top : int, default=None
+        Returns only the first n results.
+    expand : str, default=None
+        Accepts a comma-separated list of data types, which will be expanded inline in the response. Supports capacities and groups.
+    filter : str, default=None
+        Returns a subset of a results based on Odata filter query parameter condition.
+    skip : int, default=None
+        Skips the first n results. Use with top to fetch results beyond the first 1000.
+    capacity : str | uuid.UUID, default=None
+        The capacity name or ID to filter. If None, all capacities are returned.
+
+    Returns
+    -------
+    pandas.DataFrame
+        Returns a list of refreshables for the organization within a capacity.
+    """
+
+    columns = {
+        "Workspace Id": "string",
+        "Workspace Name": "string",
+        "Item Id": "string",
+        "Item Name": "string",
+        "Item Kind": "string",
+        "Capacity Id": "string",
+        "Capacity Name": "string",
+        "Capacity SKU": "string",
+        "Refresh Count": "int",
+        "Refresh Failures": "int",
+        "Average Duration": "float",
+        "Median Duration": "float",
+        "Refreshes Per Day": "int",
+        "Refresh Type": "string",
+        "Start Time": "string",
+        "End Time": "string",
+        "Status": "string",
+        "Request Id": "string",
+        "Service Exception Json": "string",
+        "Extended Status": "dict",
+        "Refresh Attempts": "list",
+        "Refresh Schedule Days": "list",
+        "Refresh Schedule Times": "list",
+        "Refresh Schedule Enabled": "bool",
+        "Refresh Schedule Local Timezone Id": "string",
+        "Refresh Schedule Notify Option": "string",
+        "Configured By": "list",
+    }
+
+    df = _create_dataframe(columns=columns)
+
+    params = {}
+    url = (
+        "/v1.0/myorg/admin/capacities/refreshables"
+        if capacity is None
+        else f"/v1.0/myorg/admin/capacities/{_resolve_capacity_id(capacity=capacity)}/refreshables"
+    )
+
+    if top is not None:
+        params["$top"] = top
+
+    if expand is not None:
+        params["$expand"] = expand
+
+    if filter is not None:
+        params["$filter"] = filter
+
+    if skip is not None:
+        params["$skip"] = skip
+
+    url = _build_url(url, params)
+
+    responses = _base_api(request=url, client="fabric_sp")
+
+    refreshables = []
+
+    for i in responses.json().get("value", []):
+        last_refresh = i.get("lastRefresh", {})
+        refresh_schedule = i.get("refreshSchedule", {})
+        new_data = {
+            "Workspace Id": i.get("group", {}).get("id"),
+            "Workspace Name": i.get("group", {}).get("name"),
+            "Item Id": i.get("id"),
+            "Item Name": i.get("name"),
+            "Item Kind": i.get("kind"),
+            "Capacity Id": (
+                i.get("capacity", {}).get("id").lower()
+                if i.get("capacity", {}).get("id")
+                else None
+            ),
+            "Capacity Name": i.get("capacity", {}).get("displayName"),
+            "Capacity SKU": i.get("capacity", {}).get("sku"),
+            "Refresh Count": i.get("refreshCount", 0),
+            "Refresh Failures": i.get("refreshFailures", 0),
+            "Average Duration": i.get("averageDuration", 0),
+            "Median Duration": i.get("medianDuration", 0),
+            "Refreshes Per Day": i.get("refreshesPerDay", 0),
+            "Refresh Type": last_refresh.get("refreshType"),
+            "Start Time": last_refresh.get("startTime"),
+            "End Time": last_refresh.get("endTime"),
+            "Status": last_refresh.get("status"),
+            "Request Id": last_refresh.get("requestId"),
+            "Service Exception Json": last_refresh.get("serviceExceptionJson"),
+            "Extended Status": last_refresh.get("extendedStatus"),
+            "Refresh Attempts": last_refresh.get("refreshAttempts"),
+            "Refresh Schedule Days": refresh_schedule.get("days"),
+            "Refresh Schedule Times": refresh_schedule.get("times"),
+            "Refresh Schedule Enabled": refresh_schedule.get("enabled"),
+            "Refresh Schedule Local Timezone Id": refresh_schedule.get(
+                "localTimeZoneId"
+            ),
+            "Refresh Schedule Notify Option": refresh_schedule.get("notifyOption"),
+            "Configured By": i.get("configuredBy"),
+        }
+
+        refreshables.append(new_data)
+
+    if len(refreshables) > 0:
+        df = pd.DataFrame(refreshables)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
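get_refreshables wraps the Power BI "Get Refreshables" admin API, including its OData query options ($top, $expand, $filter, $skip). A sketch; the capacity name and filter expression are illustrative:

    from sempy_labs import admin

    # all refreshables in the tenant, expanding capacity and workspace (group) details
    df = admin.get_refreshables(expand="capacities,groups", top=100)

    # scoped to one capacity (name or UUID), filtered server-side
    df = admin.get_refreshables(capacity="MyCapacity", filter="averageDuration gt 30")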
sempy_labs/admin/_dataflows.py
ADDED

@@ -0,0 +1,45 @@
+from typing import Optional
+from sempy_labs._helper_functions import (
+    _base_api,
+)
+from sempy_labs.admin._items import (
+    _resolve_item_id,
+)
+from uuid import UUID
+from sempy._utils._log import log
+
+
+@log
+def export_dataflow(
+    dataflow: str | UUID,
+    workspace: Optional[str | UUID] = None,
+) -> dict:
+    """
+    Shows a list of datasets for the organization.
+
+    This is a wrapper function for the following API: `Admin - Dataflows ExportDataflowAsAdmin <https://learn.microsoft.com/rest/api/power-bi/admin/dataflows-export-dataflow-as-admin>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    dataflow : str | UUID, default=None
+        The dataflow Name or Id.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or id.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+        Only used if given a dataflow name and not an id.
+
+    Returns
+    -------
+    dict
+        Exported Json file.
+    """
+    dataflow_id = _resolve_item_id(item=dataflow, type="dataflow", workspace=workspace)
+
+    url = f"/v1.0/myorg/admin/dataflows/{dataflow_id}/export"
+
+    response = _base_api(request=url, client="fabric_sp")
+
+    return response.json()
sempy_labs/admin/_items.py
CHANGED

@@ -17,20 +17,26 @@ from sempy_labs._helper_functions import (
 
 
 def _resolve_item_id(
-
+    item: str,
     type: Optional[str] = None,
     workspace: Optional[str | UUID] = None,
 ) -> UUID:
+    if _is_valid_uuid(item):
+        item_id = item
 
-
-
+    else:
+        workspace_id = _resolve_workspace_name_and_id(workspace)[1]
+        dfI = list_items(workspace=workspace_id, type=type)
+        dfI_filt = dfI[dfI["Item Name"] == item]
 
-
-
-
-
+        if len(dfI_filt) == 0:
+            raise ValueError(
+                f"The '{item}' {type} does not exist within the '{workspace}' workspace or is not of type '{type}'."
+            )
+
+        item_id = dfI_filt["Item Id"].iloc[0]
 
-    return
+    return item_id
 
 
 def _resolve_item_name_and_id(
@@ -84,9 +90,8 @@ def list_items(
     capacity : str | uuid.UUID, default=None
         The capacity name or id.
     workspace : str | uuid.UUID, default=None
-        The Fabric workspace name.
-        Defaults to None which
-        or if no lakehouse attached, resolves to the workspace of the notebook.
+        The Fabric workspace name or id.
+        Defaults to None which looks into all the workspaces.
     state : str, default=None
         The item state.
     type : str, default=None
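Per the corrected docstring, list_items with workspace=None now searches across all workspaces rather than defaulting to the attached lakehouse's workspace. Sketch:

    from sempy_labs import admin

    # every notebook across all workspaces visible to the caller
    df = admin.list_items(type="Notebook")

    # restricted to one workspace
    df = admin.list_items(type="Notebook", workspace="My Workspace")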
sempy_labs/admin/_tags.py
ADDED

@@ -0,0 +1,126 @@
+from sempy_labs._helper_functions import (
+    _base_api,
+    _is_valid_uuid,
+)
+from uuid import UUID
+from sempy_labs._tags import list_tags
+import sempy_labs._icons as icons
+from typing import List
+
+
+def resolve_tag_id(tag: str | UUID):
+
+    if _is_valid_uuid(tag):
+        tag_id = tag
+    else:
+        df = list_tags()
+        df[df["Tag Name"] == tag]
+        if df.empty:
+            raise ValueError(f"{icons.red_dot} The '{tag}' tag does not exist.")
+        tag_id = df.iloc[0]["Tag Id"]
+
+    return tag_id
+
+
+def create_tags(tags: str | List[str]):
+    """
+    Creates a new tag or tags.
+
+    This is a wrapper function for the following API: `Tags - Bulk Create Tags <https://learn.microsoft.com/rest/api/fabric/admin/tags/bulk-create-tags>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    tags : str | List[str]
+        The name of the tag or tags to create.
+    """
+
+    if isinstance(tags, str):
+        tags = [tags]
+
+    # Check the length of the tags
+    for tag in tags:
+        if len(tag) > 40:
+            raise ValueError(
+                f"{icons.red_dot} The '{tag}' tag name is too long. It must be 40 characters or less."
+            )
+
+    # Check if the tags already exist
+    df = list_tags()
+    existing_names = df["Tag Name"].tolist()
+    existing_ids = df["Tag Id"].tolist()
+
+    available_tags = [
+        tag for tag in tags if tag not in existing_names and tag not in existing_ids
+    ]
+    unavailable_tags = [
+        tag for tag in tags if tag in existing_names or tag in existing_ids
+    ]
+
+    print(f"{icons.warning} The following tags already exist: {unavailable_tags}")
+    if not available_tags:
+        print(f"{icons.info} No new tags to create.")
+        return
+
+    payload = [{"displayName": name} for name in available_tags]
+
+    for tag in tags:
+        _base_api(
+            request="/v1/admin/bulkCreateTags",
+            client="fabric_sp",
+            method="post",
+            payload=payload,
+            status_codes=201,
+        )
+
+    print(f"{icons.green_dot} The '{available_tags}' tag(s) have been created.")
+
+
+def delete_tag(tag: str | UUID):
+    """
+    Deletes a tag.
+
+    This is a wrapper function for the following API: `Tags - Delete Tag <https://learn.microsoft.com/rest/api/fabric/admin/tags/delete-tag>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    tag : str | uuid.UUID
+        The name or ID of the tag to delete.
+    """
+
+    tag_id = resolve_tag_id(tag)
+
+    _base_api(request=f"/v1/admin/tags/{tag_id}", client="fabric_sp", method="delete")
+
+    print(f"{icons.green_dot} The '{tag}' tag has been deleted.")
+
+
+def update_tag(name: str, tag: str | UUID):
+    """
+    Updates the name of a tag.
+
+    This is a wrapper function for the following API: `Tags - Update Tag <https://learn.microsoft.com/rest/api/fabric/admin/tags/update-tag>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    name : str
+        The new name of the tag.
+    tag : str | uuid.UUID
+        The name or ID of the tag to update.
+    """
+
+    tag_id = resolve_tag_id(tag)
+
+    _base_api(
+        request=f"/v1/admin/tags/{tag_id}",
+        client="fabric_sp",
+        method="patch",
+        payload={"displayName": name},
+    )
+
+    print(f"{icons.green_dot} The '{tag}' tag has been renamed to '{name}'.")
sempy_labs/admin/_tenant.py
CHANGED

@@ -32,7 +32,7 @@ def list_tenant_settings() -> pd.DataFrame:
         "Enabled": "bool",
         "Can Specify Security Groups": "bool",
         "Tenant Setting Group": "string",
-        "Enabled Security Groups": "
+        "Enabled Security Groups": "list",
     }
     df = _create_dataframe(columns=columns)
 
@@ -86,9 +86,9 @@ def list_capacity_tenant_settings_overrides(
         "Setting Title": "string",
         "Setting Enabled": "bool",
         "Can Specify Security Groups": "bool",
-        "Enabled Security Groups": "
+        "Enabled Security Groups": "list",
         "Tenant Setting Group": "string",
-        "Tenant Setting Properties": "
+        "Tenant Setting Properties": "list",
         "Delegate to Workspace": "bool",
         "Delegated From": "string",
     }
@@ -395,7 +395,7 @@ def list_workspaces_tenant_settings_overrides() -> pd.DataFrame:
         "Title": "string",
         "Enabled": "bool",
         "Can Specify Security Groups": "bool",
-        "Enabled Security Groups": "
+        "Enabled Security Groups": "list",
         "Tenant Setting Group": "string",
         "Delegated From": "string",
     }
@@ -454,7 +454,7 @@ def list_domain_tenant_settings_overrides() -> pd.DataFrame:
         "Title": "string",
         "Enabled": "bool",
         "Can Specify Security Groups": "bool",
-        "Enabled Security Groups": "
+        "Enabled Security Groups": "list",
         "Tenant Setting Group": "string",
         "Delegated To Workspace": "bool",
         "Delegated From": "string",
sempy_labs/directlake/_generate_shared_expression.py
CHANGED

@@ -3,6 +3,7 @@ from sempy_labs._helper_functions import (
     _base_api,
     resolve_lakehouse_name_and_id,
     resolve_item_name_and_id,
+    _get_fabric_context_setting,
 )
 from typing import Optional
 import sempy_labs._icons as icons
@@ -56,34 +57,36 @@ def generate_shared_expression(
         item=item_name, type=item_type, workspace=workspace_id
     )
 
-
-
-
-
-
-    prop = response.json().get("properties")
+    if use_sql_endpoint:
+        item_type_rest = f"{item_type.lower()}s"
+        response = _base_api(
+            request=f"/v1/workspaces/{workspace_id}/{item_type_rest}/{item_id}"
+        )
 
-
-        sqlprop = prop.get("sqlEndpointProperties")
-        sqlEPCS = sqlprop.get("connectionString")
-        sqlepid = sqlprop.get("id")
-        provStatus = sqlprop.get("provisioningStatus")
-    elif item_type == "Warehouse":
-        sqlEPCS = prop.get("connectionString")
-        sqlepid = item_id
-        provStatus = None
+        prop = response.json().get("properties")
 
-
-
-
-
+        if item_type == "Lakehouse":
+            sqlprop = prop.get("sqlEndpointProperties")
+            sqlEPCS = sqlprop.get("connectionString")
+            sqlepid = sqlprop.get("id")
+            provStatus = sqlprop.get("provisioningStatus")
+        elif item_type == "Warehouse":
+            sqlEPCS = prop.get("connectionString")
+            sqlepid = item_id
+            provStatus = None
 
-
-
-
+        if provStatus == "InProgress":
+            raise ValueError(
+                f"{icons.red_dot} The SQL Endpoint for the '{item_name}' {item_type.lower()} within the '{workspace_name}' workspace has not yet been provisioned. Please wait until it has been provisioned."
+            )
 
-
-
-
-    else:
+        start_expr = "let\n\tdatabase = "
+        end_expr = "\nin\n\tdatabase"
+        mid_expr = f'Sql.Database("{sqlEPCS}", "{sqlepid}")'
         return f"{start_expr}{mid_expr}{end_expr}"
+    else:
+        # Build DL/OL expression
+        env = _get_fabric_context_setting("spark.trident.pbienv").lower()
+        env = "" if env == "prod" else f"{env}-"
+
+        return f"""let\n\tSource = AzureStorage.DataLake("https://{env}onelake.dfs.fabric.microsoft.com/{workspace_id}/{item_id}")\nin\n\tSource"""