semantic-link-labs 0.9.5__py3-none-any.whl → 0.9.6__py3-none-any.whl
This diff compares the contents of publicly available package versions as released to their public registries. It is provided for informational purposes only and reflects the packages exactly as published.
Potentially problematic release: this version of semantic-link-labs has been flagged as possibly problematic.
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/METADATA +3 -2
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/RECORD +48 -47
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +15 -1
- sempy_labs/_capacities.py +37 -1
- sempy_labs/_capacity_migration.py +11 -14
- sempy_labs/_connections.py +2 -4
- sempy_labs/_dataflows.py +2 -2
- sempy_labs/_dax_query_view.py +55 -0
- sempy_labs/_delta_analyzer.py +16 -14
- sempy_labs/_environments.py +8 -1
- sempy_labs/_eventhouses.py +5 -1
- sempy_labs/_external_data_shares.py +4 -10
- sempy_labs/_generate_semantic_model.py +2 -1
- sempy_labs/_graphQL.py +5 -1
- sempy_labs/_helper_functions.py +272 -51
- sempy_labs/_kql_databases.py +5 -1
- sempy_labs/_list_functions.py +5 -37
- sempy_labs/_managed_private_endpoints.py +9 -2
- sempy_labs/_mirrored_databases.py +3 -1
- sempy_labs/_ml_experiments.py +1 -1
- sempy_labs/_model_bpa.py +2 -11
- sempy_labs/_model_bpa_bulk.py +23 -27
- sempy_labs/_one_lake_integration.py +2 -1
- sempy_labs/_semantic_models.py +20 -0
- sempy_labs/_sql.py +6 -2
- sempy_labs/_sqldatabase.py +61 -100
- sempy_labs/_vertipaq.py +8 -11
- sempy_labs/_warehouses.py +14 -3
- sempy_labs/_workspace_identity.py +6 -0
- sempy_labs/_workspaces.py +42 -2
- sempy_labs/admin/_basic_functions.py +3 -2
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/directlake/_directlake_schema_compare.py +2 -1
- sempy_labs/directlake/_directlake_schema_sync.py +65 -19
- sempy_labs/directlake/_dl_helper.py +0 -6
- sempy_labs/directlake/_generate_shared_expression.py +10 -11
- sempy_labs/directlake/_guardrails.py +2 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
- sempy_labs/directlake/_update_directlake_partition_entity.py +2 -2
- sempy_labs/lakehouse/_shortcuts.py +7 -5
- sempy_labs/migration/_migration_validation.py +0 -4
- sempy_labs/report/_download_report.py +4 -6
- sempy_labs/report/_generate_report.py +6 -6
- sempy_labs/report/_report_functions.py +2 -1
- sempy_labs/report/_report_rebind.py +8 -6
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/top_level.txt +0 -0
sempy_labs/_external_data_shares.py
CHANGED

@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 from uuid import UUID
 import pandas as pd
 from typing import Optional, List
@@ -7,6 +6,7 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
+    resolve_item_id,
 )


@@ -39,9 +39,7 @@ def create_external_data_share(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = fabric.resolve_item_id(
-        item_name=item_name, type=item_type, workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)

     if isinstance(paths, str):
         paths = [paths]
@@ -85,9 +83,7 @@ def revoke_external_data_share(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = fabric.resolve_item_id(
-        item_name=item_name, type=item_type, workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)

     _base_api(
         request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke",
@@ -124,9 +120,7 @@ def list_external_data_shares_in_item(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = fabric.resolve_item_id(
-        item_name=item_name, type=item_type, workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)

     columns = {
         "External Data Share Id": "string",

sempy_labs/_generate_semantic_model.py
CHANGED

@@ -11,6 +11,7 @@ from sempy_labs._helper_functions import (
     _decode_b64,
     _base_api,
     _mount,
+    resolve_workspace_id,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
@@ -285,7 +286,7 @@ def deploy_semantic_model(

     if target_workspace is None:
         target_workspace_name = source_workspace_name
-        target_workspace_id = fabric.resolve_workspace_id(target_workspace_name)
+        target_workspace_id = resolve_workspace_id(workspace=target_workspace_name)
     else:
         (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
             target_workspace
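A note on the recurring pattern: call sites that previously delegated to the `sempy.fabric` resolvers now route through the package's own helpers (`resolve_item_id`, `resolve_workspace_id`), which accept either a display name or a UUID. A minimal sketch of the new call shape; the workspace and item names are illustrative, not taken from this diff:

    from sempy_labs._helper_functions import resolve_item_id

    # Name-based lookup requires the item type; a UUID input is instead
    # validated against the workspace and returned as-is.
    item_id = resolve_item_id(item="Sales", type="Lakehouse", workspace="Contoso")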
sempy_labs/_graphQL.py
CHANGED
@@ -15,6 +15,8 @@ def list_graphql_apis(workspace: Optional[str | UUID]) -> pd.DataFrame:

     This is a wrapper function for the following API: `Items - List GraphQLApis <https://learn.microsoft.com/rest/api/fabric/graphqlapi/items/list-graphqlapi-s>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -38,7 +40,9 @@ def list_graphql_apis(workspace: Optional[str | UUID]) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/GraphQLApis",
+        request=f"/v1/workspaces/{workspace_id}/GraphQLApis",
+        uses_pagination=True,
+        client="fabric_sp",
     )

     for r in responses:
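With `uses_pagination=True`, `_base_api` evidently returns a list of page responses rather than a single response, which is why these call sites iterate over `responses`; `client="fabric_sp"` selects the service-principal-capable client that the updated docstrings advertise. A sketch of the consuming pattern, mirroring the hunk above (the workspace ID is a placeholder; dataframe assembly is elided):

    from sempy_labs._helper_functions import _base_api

    workspace_id = "<workspace-guid>"  # placeholder
    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/GraphQLApis",
        uses_pagination=True,
        client="fabric_sp",  # service-principal-capable client
    )
    for r in responses:
        for v in r.get("value", []):
            print(v.get("id"), v.get("displayName"))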
sempy_labs/_helper_functions.py
CHANGED
@@ -4,7 +4,7 @@ import json
 import base64
 import time
 import uuid
-from sempy.fabric.exceptions import FabricHTTPException
+from sempy.fabric.exceptions import FabricHTTPException, WorkspaceNotFoundException
 import pandas as pd
 from functools import wraps
 import datetime
@@ -39,6 +39,7 @@ def create_abfss_path(
     lakehouse_id: UUID,
     lakehouse_workspace_id: UUID,
     delta_table_name: Optional[str] = None,
+    schema: Optional[str] = None,
 ) -> str:
     """
     Creates an abfss path for a delta table in a Fabric lakehouse.
@@ -51,6 +52,8 @@ def create_abfss_path(
         ID of the Fabric workspace.
     delta_table_name : str, default=None
         Name of the delta table name.
+    schema : str, default=None
+        The schema of the delta table.

     Returns
     -------
@@ -62,6 +65,8 @@ def create_abfss_path(
     path = f"abfss://{lakehouse_workspace_id}@{fp}/{lakehouse_id}"

     if delta_table_name is not None:
+        if schema is not None:
+            path += f"/{schema}"
         path += f"/Tables/{delta_table_name}"

     return path
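Per the logic above, a schema-qualified table now yields a path of the form `.../{lakehouse_id}/{schema}/Tables/{table}`. A standalone illustration of the string construction; the host and IDs are placeholders, not values from this diff (`fp` stands in for the endpoint the module resolves elsewhere):

    fp = "onelake.dfs.fabric.microsoft.com"  # assumed OneLake endpoint
    workspace_id, lakehouse_id = "<workspace-guid>", "<lakehouse-guid>"
    schema, delta_table_name = "dbo", "sales"

    path = f"abfss://{workspace_id}@{fp}/{lakehouse_id}"
    if delta_table_name is not None:
        if schema is not None:
            path += f"/{schema}"
        path += f"/Tables/{delta_table_name}"

    print(path)
    # abfss://<workspace-guid>@onelake.dfs.fabric.microsoft.com/<lakehouse-guid>/dbo/Tables/sales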
@@ -178,9 +183,7 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None)
         The name of the Power BI report.
     """

-    return fabric.resolve_item_name(
-        item_id=report_id, type="Report", workspace=workspace
-    )
+    return resolve_item_name(item_id=report_id, type="Report", workspace=workspace)


 def delete_item(
@@ -305,37 +308,84 @@ def get_item_definition(


 def resolve_item_id(
-    item: str | UUID, type: str, workspace: Optional[str] = None
+    item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
 ) -> UUID:

+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    item_id = None
+
     if _is_valid_uuid(item):
+        # Check (optional)
+        item_id = item
+        try:
+            _base_api(
+                request=f"/v1/workspaces/{workspace_id}/items/{item_id}",
+                client="fabric_sp",
+            )
+        except FabricHTTPException:
+            raise ValueError(
+                f"{icons.red_dot} The '{item_id}' item was not found in the '{workspace_name}' workspace."
+            )
     else:
+        if type is None:
+            raise ValueError(
+                f"{icons.red_dot} The 'type' parameter is required if specifying an item name."
+            )
+        responses = _base_api(
+            request=f"/v1/workspaces/{workspace_id}/items?type={type}",
+            client="fabric_sp",
+            uses_pagination=True,
+        )
+        for r in responses:
+            for v in r.get("value", []):
+                display_name = v.get("displayName")
+                if display_name == item:
+                    item_id = v.get("id")
+                    break
+
+        if item_id is None:
+            raise ValueError(
+                f"{icons.red_dot} There's no item '{item}' of type '{type}' in the '{workspace_name}' workspace."
+            )
+
+    return item_id


 def resolve_item_name_and_id(
     item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
 ) -> Tuple[str, UUID]:

-        item_id=item_id, type=type, workspace=workspace_id
-    )
-    return item_name
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=item, type=type, workspace=workspace_id)
+    item_name = (
+        _base_api(
+            request=f"/v1/workspaces/{workspace_id}/items/{item_id}", client="fabric_sp"
+        )
+        .json()
+        .get("displayName")
+    )
+
+    return item_name, item_id
+
+
+def resolve_item_name(item_id: UUID, workspace: Optional[str | UUID] = None) -> str:
+
+    workspace_id = resolve_workspace_id(workspace)
+    try:
+        item_name = (
+            _base_api(
+                request=f"/v1/workspaces/{workspace_id}/items/{item_id}",
+                client="fabric_sp",
+            )
+            .json()
+            .get("displayName")
+        )
+    except FabricHTTPException:
+        raise ValueError(
+            f"{icons.red_dot} The '{item_id}' item was not found in the '{workspace_id}' workspace."
+        )
+
+    return item_name


 def resolve_lakehouse_name_and_id(
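The rewritten `resolve_item_id` no longer trusts its input: a UUID is verified with a GET against the workspace, and a display name cannot be resolved without `type`; both failure modes raise `ValueError` early. A behavioral sketch with illustrative values:

    from uuid import uuid4
    from sempy_labs._helper_functions import resolve_item_id

    try:
        # A well-formed but nonexistent ID now fails fast instead of being
        # passed through unchecked.
        resolve_item_id(item=str(uuid4()), workspace="Contoso")
    except ValueError as e:
        print(e)

    try:
        # A name without a type is also rejected.
        resolve_item_id(item="Sales", workspace="Contoso")
    except ValueError as e:
        print(e)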
@@ -346,19 +396,18 @@ def resolve_lakehouse_name_and_id(
     type = "Lakehouse"

     if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse_id =
-            item_id=lakehouse_id, type=type, workspace=workspace_id
+        lakehouse_id = _get_fabric_context_setting(name="trident.lakehouse.id")
+        if lakehouse_id == "":
+            raise ValueError(
+                f"{icons.red_dot} Cannot resolve a lakehouse. Please enter a valid lakehouse or make sure a lakehouse is attached to the notebook."
+            )
+        (lakehouse_name, lakehouse_id) = resolve_item_name_and_id(
+            item=lakehouse_id, type=type, workspace=workspace_id
         )
+
     else:
-        lakehouse_name =
-            item_name=lakehouse, type=type, workspace=workspace_id
+        (lakehouse_name, lakehouse_id) = resolve_item_name_and_id(
+            item=lakehouse, type=type, workspace=workspace_id
         )

     return lakehouse_name, lakehouse_id
@@ -420,7 +469,7 @@ def resolve_dataset_name(
         The name of the semantic model.
     """

-    return fabric.resolve_item_name(
+    return resolve_item_name(
         item_id=dataset_id, type="SemanticModel", workspace=workspace
     )
@@ -448,9 +497,13 @@ def resolve_lakehouse_name(
     """

     if lakehouse_id is None:
-        lakehouse_id = fabric.get_lakehouse_id()
+        lakehouse_id = _get_fabric_context_setting(name="trident.lakehouse.id")
+        if lakehouse_id == "":
+            raise ValueError(
+                f"{icons.red_dot} Cannot resolve a lakehouse. Please enter a valid lakehouse or make sure a lakehouse is attached to the notebook."
+            )

-    return fabric.resolve_item_name(
+    return resolve_item_name(
         item_id=lakehouse_id, type="Lakehouse", workspace=workspace
     )
@@ -477,12 +530,14 @@ def resolve_lakehouse_id(
     """

     if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
+        lakehouse_id = _get_fabric_context_setting(name="trident.lakehouse.id")
+        if lakehouse_id == "":
+            raise ValueError(
+                f"{icons.red_dot} Cannot resolve a lakehouse. Please enter a valid lakehouse or make sure a lakehouse is attached to the notebook."
+            )
     else:
-        lakehouse_id =
+        lakehouse_id = resolve_item_id(
+            item=lakehouse, type="Lakehouse", workspace=workspace
         )

     return lakehouse_id
@@ -751,6 +806,55 @@ def language_validate(language: str):
     return lang


+def resolve_workspace_id(
+    workspace: Optional[str | UUID] = None,
+) -> UUID:
+    if workspace is None:
+        workspace_id = _get_fabric_context_setting(name="trident.workspace.id")
+    elif _is_valid_uuid(workspace):
+        # Check (optional)
+        workspace_id = workspace
+        try:
+            _base_api(request=f"/v1/workspaces/{workspace_id}", client="fabric_sp")
+        except FabricHTTPException:
+            raise ValueError(
+                f"{icons.red_dot} The '{workspace_id}' workspace was not found."
+            )
+    else:
+        responses = _base_api(
+            request="/v1/workspaces", client="fabric_sp", uses_pagination=True
+        )
+        workspace_id = None
+        for r in responses:
+            for v in r.get("value", []):
+                display_name = v.get("displayName")
+                if display_name == workspace:
+                    workspace_id = v.get("id")
+                    break
+
+        if workspace_id is None:
+            raise WorkspaceNotFoundException(workspace)
+
+    return workspace_id
+
+
+def resolve_workspace_name(workspace_id: Optional[UUID] = None) -> str:
+
+    if workspace_id is None:
+        workspace_id = _get_fabric_context_setting(name="trident.workspace.id")
+
+    try:
+        response = _base_api(
+            request=f"/v1/workspaces/{workspace_id}", client="fabric_sp"
+        ).json()
+    except FabricHTTPException:
+        raise ValueError(
+            f"{icons.red_dot} The '{workspace_id}' workspace was not found."
+        )
+
+    return response.get("displayName")
+
+
 def resolve_workspace_name_and_id(
     workspace: Optional[str | UUID] = None,
 ) -> Tuple[str, str]:
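The new `resolve_workspace_id` makes the resolution order explicit: `None` falls back to the notebook's attached workspace via the `trident.workspace.id` context setting, a UUID is validated with a single GET, and a name is matched by scanning the paginated `/v1/workspaces` listing client-side, so UUID input is the cheaper path. A usage sketch (workspace name illustrative):

    from sempy_labs._helper_functions import resolve_workspace_id

    ws_id = resolve_workspace_id()            # attached workspace
    ws_id = resolve_workspace_id(ws_id)       # UUID: validated, returned as-is
    ws_id = resolve_workspace_id("Contoso")   # name: client-side scan of the workspace list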
@@ -771,16 +875,29 @@ def resolve_workspace_name_and_id(
     """

     if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace_name = fabric.resolve_workspace_name(workspace_id)
+        workspace_id = _get_fabric_context_setting(name="trident.workspace.id")
+        workspace_name = resolve_workspace_name(workspace_id)
     elif _is_valid_uuid(workspace):
         workspace_id = workspace
-        workspace_name = fabric.resolve_workspace_name(workspace_id)
+        workspace_name = resolve_workspace_name(workspace_id)
     else:
+        responses = _base_api(
+            request="/v1/workspaces", client="fabric_sp", uses_pagination=True
+        )
+        workspace_id = None
+        workspace_name = None
+        for r in responses:
+            for v in r.get("value", []):
+                display_name = v.get("displayName")
+                if display_name == workspace:
+                    workspace_name = workspace
+                    workspace_id = v.get("id")
+                    break
+
+        if workspace_name is None or workspace_id is None:
+            raise WorkspaceNotFoundException(workspace)

-    return
+    return workspace_name, workspace_id
@@ -893,7 +1010,7 @@ def resolve_dataset_from_report(
     dfR = _get_report(report=report, workspace=workspace)
     dataset_id = dfR["Dataset Id"].iloc[0]
     dataset_workspace_id = dfR["Dataset Workspace Id"].iloc[0]
-    dataset_workspace = fabric.resolve_workspace_name(workspace_id=dataset_workspace_id)
+    dataset_workspace = resolve_workspace_name(workspace_id=dataset_workspace_id)
     dataset_name = resolve_dataset_name(
         dataset_id=dataset_id, workspace=dataset_workspace
     )
@@ -926,12 +1043,13 @@ def resolve_workspace_capacity(
     Tuple[uuid.UUID, str]
         capacity Id; capacity came.
     """
+    from sempy_labs._capacities import list_capacities

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     filter_condition = urllib.parse.quote(workspace_id)
     dfW = fabric.list_workspaces(filter=f"id eq '{filter_condition}'")
     capacity_id = dfW["Capacity Id"].iloc[0]
-    dfC =
+    dfC = list_capacities()
     dfC_filt = dfC[dfC["Id"] == capacity_id]
     if len(dfC_filt) == 1:
         capacity_name = dfC_filt["Display Name"].iloc[0]
@@ -989,8 +1107,10 @@ def get_capacity_name(workspace: Optional[str | UUID] = None) -> str:
         The capacity name.
     """

+    from sempy_labs._capacities import list_capacities
+
     capacity_id = get_capacity_id(workspace)
-    dfC =
+    dfC = list_capacities()
     dfC_filt = dfC[dfC["Id"] == capacity_id]
     if dfC_filt.empty:
         raise ValueError(
@@ -1016,11 +1136,12 @@ def resolve_capacity_name(capacity_id: Optional[UUID] = None) -> str:
     str
         The capacity name.
     """
+    from sempy_labs._capacities import list_capacities

     if capacity_id is None:
         return get_capacity_name()

-    dfC =
+    dfC = list_capacities()
     dfC_filt = dfC[dfC["Id"] == capacity_id]

     if dfC_filt.empty:
@@ -1047,6 +1168,7 @@ def resolve_capacity_id(capacity: Optional[str | UUID] = None, **kwargs) -> UUID
     uuid.UUID
         The capacity Id.
     """
+    from sempy_labs._capacities import list_capacities

     if "capacity_name" in kwargs:
         capacity = kwargs["capacity_name"]
@@ -1059,7 +1181,7 @@ def resolve_capacity_id(capacity: Optional[str | UUID] = None, **kwargs) -> UUID
     if _is_valid_uuid(capacity):
         return capacity

-    dfC =
+    dfC = list_capacities()
     dfC_filt = dfC[dfC["Display Name"] == capacity]

     if dfC_filt.empty:
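All four capacity helpers defer `from sempy_labs._capacities import list_capacities` into the function body rather than importing at module top, presumably to avoid a circular import, since `_capacities` would in turn depend on `_helper_functions`. The pattern, sketched under that assumption:

    def resolve_capacity_name(capacity_id):
        # Deferred import: a top-level import would presumably create a cycle,
        # because _capacities imports from _helper_functions.
        from sempy_labs._capacities import list_capacities

        dfC = list_capacities()
        return dfC[dfC["Id"] == capacity_id]["Display Name"].iloc[0]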
@@ -1558,6 +1680,7 @@ def _convert_data_type(input_data_type: str) -> str:
     "date": "DateTime",
     "double": "Double",
     "float": "Double",
+    "binary": "Boolean",
 }

 if "decimal" in input_data_type:
@@ -1794,3 +1917,101 @@ def _mount(lakehouse, workspace) -> str:
     )

     return local_path
+
+
+def _get_or_create_workspace(
+    workspace: str,
+    capacity: Optional[str | UUID] = None,
+    description: Optional[str] = None,
+) -> Tuple[str, UUID]:
+
+    capacity_id = resolve_capacity_id(capacity)
+    dfW = fabric.list_workspaces()
+    dfW_filt_name = dfW[dfW["Name"] == workspace]
+    dfW_filt_id = dfW[dfW["Id"] == workspace]
+
+    # Workspace already exists
+    if (not dfW_filt_name.empty) or (not dfW_filt_id.empty):
+        print(f"{icons.green_dot} The '{workspace}' workspace already exists.")
+        (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+        return (workspace_name, workspace_id)
+
+    # Do not create workspace with name of an ID
+    if _is_valid_uuid(workspace):
+        raise ValueError(f"{icons.warning} Must enter a workspace name, not an ID.")
+
+    print(f"{icons.in_progress} Creating the '{workspace}' workspace...")
+    workspace_id = fabric.create_workspace(
+        display_name=workspace, capacity_id=capacity_id, description=description
+    )
+    print(
+        f"{icons.green_dot} The '{workspace}' workspace has been successfully created."
+    )
+
+    return (workspace, workspace_id)
+
+
+def _get_or_create_lakehouse(
+    lakehouse: str,
+    workspace: Optional[str | UUID] = None,
+    description: Optional[str] = None,
+) -> Tuple[str, UUID]:
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    dfI = fabric.list_items(type="Lakehouse", workspace=workspace)
+    dfI_filt_name = dfI[dfI["Display Name"] == lakehouse]
+    dfI_filt_id = dfI[dfI["Id"] == lakehouse]
+
+    if (not dfI_filt_name.empty) or (not dfI_filt_id.empty):
+        print(f"{icons.green_dot} The '{lakehouse}' lakehouse already exists.")
+        (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
+            lakehouse=lakehouse, workspace=workspace
+        )
+        return (lakehouse_name, lakehouse_id)
+    if _is_valid_uuid(lakehouse):
+        raise ValueError(f"{icons.warning} Must enter a lakehouse name, not an ID.")
+
+    print(f"{icons.in_progress} Creating the '{lakehouse}' lakehouse...")
+    lakehouse_id = fabric.create_lakehouse(
+        display_name=lakehouse, workspace=workspace, description=description
+    )
+    print(
+        f"{icons.green_dot} The '{lakehouse}' lakehouse has been successfully created within the '{workspace_name}' workspace."
+    )
+
+    return (lakehouse, lakehouse_id)
+
+
+def _get_or_create_warehouse(
+    warehouse: str,
+    workspace: Optional[str | UUID] = None,
+    description: Optional[str] = None,
+) -> Tuple[str, UUID]:
+
+    from sempy_labs._warehouses import create_warehouse
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    dfI = fabric.list_items(type="Warehouse", workspace=workspace)
+    dfI_filt_name = dfI[dfI["Display Name"] == warehouse]
+    dfI_filt_id = dfI[dfI["Id"] == warehouse]
+
+    if (not dfI_filt_name.empty) or (not dfI_filt_id.empty):
+        print(f"{icons.green_dot} The '{warehouse}' warehouse already exists.")
+        (warehouse_name, warehouse_id) = resolve_item_name_and_id(
+            warehouse=warehouse, type="Warehouse", workspace=workspace
+        )
+        return (warehouse_name, warehouse_id)
+    if _is_valid_uuid(warehouse):
+        raise ValueError(f"{icons.warning} Must enter a warehouse name, not an ID.")
+
+    print(f"{icons.in_progress} Creating the '{warehouse}' warehouse...")
+    warehouse_id = create_warehouse(
+        display_name=warehouse, workspace=workspace, description=description
+    )
+    print(
+        f"{icons.green_dot} The '{warehouse}' warehouse has been successfully created within the '{workspace_name}' workspace."
+    )
+
+    return (warehouse, warehouse_id)
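The three new `_get_or_create_*` helpers share one idempotent shape: return the existing item when the name (or ID) is found, refuse to create anything when the supplied name is itself a UUID, and otherwise create and report. A usage sketch; the names are illustrative, and the leading underscore marks these as private, so treat the interface as unstable:

    from sempy_labs._helper_functions import (
        _get_or_create_lakehouse,
        _get_or_create_workspace,
    )

    # Safe to re-run: existing items are returned, not duplicated.
    ws_name, ws_id = _get_or_create_workspace("Contoso Dev", capacity="MyCapacity")
    lh_name, lh_id = _get_or_create_lakehouse("Bronze", workspace=ws_id)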
sempy_labs/_kql_databases.py
CHANGED
@@ -17,6 +17,8 @@ def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:

     This is a wrapper function for the following API: `Items - List KQL Databases <https://learn.microsoft.com/rest/api/fabric/kqldatabase/items/list-kql-databases>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -44,7 +46,9 @@ def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     responses = _base_api(
-        request=f"v1/workspaces/{workspace_id}/kqlDatabases",
+        request=f"v1/workspaces/{workspace_id}/kqlDatabases",
+        uses_pagination=True,
+        client="fabric_sp",
     )

     for r in responses:
sempy_labs/_list_functions.py
CHANGED
@@ -605,6 +605,8 @@ def list_lakehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the lakehouses within a workspace.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -633,7 +635,9 @@ def list_lakehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/lakehouses",
+        request=f"/v1/workspaces/{workspace_id}/lakehouses",
+        uses_pagination=True,
+        client="fabric_sp",
     )

     for r in responses:
@@ -1172,42 +1176,6 @@ def list_shortcuts(
     return list_shortcuts(lakehouse=lakehouse, workspace=workspace, path=path)


-def list_capacities() -> pd.DataFrame:
-    """
-    Shows the capacities and their properties.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the capacities and their properties
-    """
-
-    columns = {
-        "Id": "string",
-        "Display Name": "string",
-        "Sku": "string",
-        "Region": "string",
-        "State": "string",
-        "Admins": "string",
-    }
-    df = _create_dataframe(columns=columns)
-
-    response = _base_api(request="/v1.0/myorg/capacities")
-
-    for i in response.json().get("value", []):
-        new_data = {
-            "Id": i.get("id").lower(),
-            "Display Name": i.get("displayName"),
-            "Sku": i.get("sku"),
-            "Region": i.get("region"),
-            "State": i.get("state"),
-            "Admins": [i.get("admins", [])],
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
 def list_reports_using_semantic_model(
     dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame: