semantic-link-labs 0.12.2__py3-none-any.whl → 0.12.3__py3-none-any.whl

This diff compares the contents of two publicly released package versions as they appear in their public registry, and is provided for informational purposes only.

semantic_link_labs-0.12.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: semantic-link-labs
-Version: 0.12.2
+Version: 0.12.3
 Summary: Semantic Link Labs for Microsoft Fabric
 Author: Microsoft Corporation
 License: MIT License
@@ -26,7 +26,7 @@ Dynamic: license-file
 # Semantic Link Labs
 
 [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
-[![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.12.2&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+[![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.12.3&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
 [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
 [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)
 
@@ -154,6 +154,7 @@ An even better way to ensure the semantic-link-labs library is available in your
 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
 
 ## Version History
+* [0.12.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.12.3) (September 17, 2025)
 * [0.12.2](https://github.com/microsoft/semantic-link-labs/releases/tag/0.12.2) (September 12, 2025)
 * [0.12.1](https://github.com/microsoft/semantic-link-labs/releases/tag/0.12.1) (September 4, 2025)
 * [0.12.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.12.0) (September 2, 2025)
semantic_link_labs-0.12.3.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
-semantic_link_labs-0.12.2.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
-sempy_labs/__init__.py,sha256=ftNoWwvm0GfuyNRo3ObrtwqBFtoi71QALNLPECyx84c,16361
-sempy_labs/_a_lib_info.py,sha256=i6qs3OhdpTrleupFQc-NjZPBCANJDJeKGl37EJgjlc0,53
+semantic_link_labs-0.12.3.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+sempy_labs/__init__.py,sha256=YHGcCDoS9Vr9GPSIZ0ziX0bih9J6qrCDFHFBp4iep5g,16533
+sempy_labs/_a_lib_info.py,sha256=CqHuHL_fnKTvOytnNfVS-2H843UDtuG44yT4bAY9aOA,53
 sempy_labs/_ai.py,sha256=fiI3RCadJ2jG15vZIWD9rKVhvmGWoD9uth9-PlSPBNs,16196
 sempy_labs/_authentication.py,sha256=-mSXB3tlVhutOo2nhmQQOllWWVvuy4G9rTM77v4S8lw,8485
 sempy_labs/_capacities.py,sha256=TQYlbM0LP21G-KsW6QrJ7heBzKL42XxA939VldWiKeg,40436
@@ -23,10 +23,11 @@ sempy_labs/_eventhouses.py,sha256=AJXJ00AOfEaKq0wagZfoLHorztyLHaRx6pkdbunCvFI,59
 sempy_labs/_eventstreams.py,sha256=o9DPk1nBS3TDbokPP9AEtM18CmtRhaHI7LGZ2sRmTeA,4118
 sempy_labs/_external_data_shares.py,sha256=qHirKpBGseQOkg3xHOM5450Wpz3vmWfG3xexYjLQT9M,8945
 sempy_labs/_gateways.py,sha256=Nsb2qSXiAVS0CELhjji4FkKVBdnXhP-6spqqTBZc6sU,18100
-sempy_labs/_generate_semantic_model.py,sha256=hK2f61PR55080smJBIrq7BgcoTWal_4i4eQulwHjtos,18236
+sempy_labs/_generate_semantic_model.py,sha256=jx6azjeLRiFTOF1up4u6emwOoD_EqFy1X18LyDcReb8,18236
+sempy_labs/_get_connection_string.py,sha256=55AAckOhWLC2Vz1bIjseGkWrC6pnN3sgIfLD8hAy45w,2989
 sempy_labs/_git.py,sha256=x8BrvkJNWkP6I-_os_wdNjEZVNViqfINmc-q7sGoQ3Q,17903
 sempy_labs/_graphQL.py,sha256=WDallUQBiOqJdz0aJmYH3cUXCOW_AqhFLs0EpV8_5Rw,2749
-sempy_labs/_helper_functions.py,sha256=dWqifJIgGTjjGTEzNxYsCpsiVv9kvCIrrGBnM8wb3VE,85166
+sempy_labs/_helper_functions.py,sha256=YBsKm2bC29QluFy156nPBZ-h1GDVjkFxLqFF_VQVU2c,85192
 sempy_labs/_icons.py,sha256=SB9EQeoFCfD4bO6fcYuJOoPRSYenSrW0rI9G5RFsH28,3579
 sempy_labs/_job_scheduler.py,sha256=iCrtFD3pWwUCmjPwV4XzMbmJn_Bn2DwDvedxN9qQNFo,18868
 sempy_labs/_kql_databases.py,sha256=Wr338mNf0wzvpBJYHbWHjzUudoeMZMUia_smdkzbPfI,4796
@@ -48,10 +49,10 @@ sempy_labs/_notebooks.py,sha256=_0Ug6SF2bihCEFt6P-6DjoGj3302OlDfXmfOJ-ue5Tg,1439
 sempy_labs/_one_lake_integration.py,sha256=aThg8Fcei9qTFUTzg2Qwsn-3deTk0ICTCXx53Y23oso,6293
 sempy_labs/_query_scale_out.py,sha256=tcIEJSHjZ1gh7--WMS8sDqDfF3wbfIEX9Xm2qgahXrc,15348
 sempy_labs/_refresh_semantic_model.py,sha256=yu92m4Ys6lbW68PP9tVCeUNHXj7cGXvyGr2WYRqgl1g,17365
-sempy_labs/_semantic_models.py,sha256=PbDM0u1mv6-aRwUJbxfsGm2yFFBTJvYotLFUHyvMF6g,12585
+sempy_labs/_semantic_models.py,sha256=WGIyzDA6AiuJG6A3VSMMNtVlUbAcHrEKJNTfxyiYosM,16975
 sempy_labs/_spark.py,sha256=aQAqmRAm04NWH9j4_qmYJAWdIluoWKzHDTBPr01GWbA,19404
 sempy_labs/_sql.py,sha256=Y7yRO8j0P6h68VNaqmWVSyfLrI9L9TMBLykovDkgIzY,8274
-sempy_labs/_sql_endpoints.py,sha256=eD2i5r7l9FA9YIW4i0PIpsG3H5mr1HPXJ9g6Lae1uNQ,6766
+sempy_labs/_sql_endpoints.py,sha256=EUiJxWdnivxYw5ZQxam9kipxy67RZfUVSJUpKnaUw0Q,6679
 sempy_labs/_sqldatabase.py,sha256=vmTsB1IAluQ99cf8fmcPO2z0SjAjTOM8OCD5nuJdSOI,6908
 sempy_labs/_tags.py,sha256=tqQlj7AvbaniN8mZl59g145Ofj_wdA6Bnrna0PzlwI4,5897
 sempy_labs/_translations.py,sha256=6A8CPmH_xvsONX4dOG5XSZ-XeJuAy5VokFJql6uf_Ak,1432
@@ -59,7 +60,7 @@ sempy_labs/_user_delegation_key.py,sha256=dj540zd_IGNt2GQ_a69_8IBoyZdpldx_3z6NxN
 sempy_labs/_utils.py,sha256=X7wcjg809ZyEgf6fE0mZIv9qe1n1oQX_hHXEHgR4u0U,2737
 sempy_labs/_vertipaq.py,sha256=1UvB79xOxeGdRFINsUsreXxtZtiatHlACAfbQhv45as,38536
 sempy_labs/_vpax.py,sha256=4rtXXGVoadvdu7uiU9PVsgKszST3XH-K56zmWdMmZEg,15471
-sempy_labs/_warehouses.py,sha256=ep4ZMa_1CNbWuQ3AGbGu8udOZ_SIochFJa4fCLqgkL0,9757
+sempy_labs/_warehouses.py,sha256=l5oJQTwqi0yW6ewHM2mS9TmRy7w1IWeeyQjDHJgWnuU,7788
 sempy_labs/_workloads.py,sha256=K2KPY1_e6SDqz_NQDBrrMlRwzEyVV5dqd1shBs8Bu6E,4731
 sempy_labs/_workspace_identity.py,sha256=rLzrNjnWbpjNn4bu7xW0nSjG9t1nbc4xG6BdkZNKP1Q,2605
 sempy_labs/_workspaces.py,sha256=djjY7zLLKqsN0UJqzVpCp3j6_k81RP3VimCv1IM8Eb4,18334
@@ -136,8 +137,9 @@ sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=dX
 sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=EQpePqgTsetDG5JJrL5pfYjibmsuzrYk6x2mj0PDfEY,8984
 sempy_labs/directlake/_warm_cache.py,sha256=ks_rX2WOUk77rLsvwsCYyQOx-5U-pts27_guLYgMw6w,9236
 sempy_labs/dotnet_lib/dotnet.runtime.config.json,sha256=syhDFQv6cEmZnE1WtFjNe3NwhsIsnd-CFULv-vEWOFI,167
-sempy_labs/graph/__init__.py,sha256=OwFxe_JvIETazMCPw0Cx-tHXh5sGOiEPwawCeR6O7jg,794
+sempy_labs/graph/__init__.py,sha256=LLWs0t_rdu4oGBz9948Ob1UPDscDjhTWAswO8B4YthY,891
 sempy_labs/graph/_groups.py,sha256=T3uzMepJC3SYISV4C9y4sVY7r_qH609sScBMljhC5rY,17763
+sempy_labs/graph/_sensitivity_labels.py,sha256=tTTBQDevbaG5cdmCjgzbXIigamOi_Tx9o_2Wkfkc0bY,2907
 sempy_labs/graph/_teams.py,sha256=UH5ETsReD0pzmdgqQFkcX_95o1aUAv2lAajRJc0RIZY,3175
 sempy_labs/graph/_users.py,sha256=2QwwUYfaJnPHPY6M2a9QBWYwwkVDjpcbOQd_Bo9GjJ8,15181
 sempy_labs/lakehouse/__init__.py,sha256=jOCjgkIZgwl373pb6lmEwqIkbifwJUKY3K0eZ8LAVws,1245
@@ -209,10 +211,10 @@ sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visua
 sempy_labs/theme/__init__.py,sha256=JN0z8w_Hc7VUIchRbpY-rjU6879msrFiSTRtMMlr_5g,185
 sempy_labs/theme/_org_themes.py,sha256=ArLqr1KYI6CT1_mxKOsZlVPaTO0KSkkQ1LbFy4A1fqg,3323
 sempy_labs/tom/__init__.py,sha256=ZwSpgYDP5VamZTnYP3a1cYHiaKdktOvlOBSOY1816zY,107
-sempy_labs/tom/_model.py,sha256=Izf1YSh2nE4VZq9K92cefJN8_onmqP7DD_o4oiOUCOQ,223020
+sempy_labs/tom/_model.py,sha256=8ArPJJJ8Wgo6FYWfEPWlVl60Umq8INdU6NOyMXaYavY,223643
 sempy_labs/variable_library/__init__.py,sha256=qyTw5vNldnwYv-TotQSFupwznKIQfcws1UxGjf1RNNo,437
 sempy_labs/variable_library/_functions.py,sha256=eoB3hUKFGdGMSBNDEsEF9bVoELZp5AnyDxp5BsLGeDc,13733
-semantic_link_labs-0.12.2.dist-info/METADATA,sha256=2ft_8tljZ0wn-asR56HeweU56aup-69e2a0WJFytrSY,27646
-semantic_link_labs-0.12.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-semantic_link_labs-0.12.2.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
-semantic_link_labs-0.12.2.dist-info/RECORD,,
+semantic_link_labs-0.12.3.dist-info/METADATA,sha256=HhegWEKJLmk-NZMGq9L4gRnrODhbC3HzSvlZNZnRw3M,27747
+semantic_link_labs-0.12.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+semantic_link_labs-0.12.3.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+semantic_link_labs-0.12.3.dist-info/RECORD,,
sempy_labs/__init__.py CHANGED
@@ -31,6 +31,8 @@ from ._semantic_models import (
     delete_semantic_model,
     update_semantic_model_refresh_schedule,
     list_semantic_model_datasources,
+    bind_semantic_model_connection,
+    unbind_semantic_model_connection,
 )
 from ._graphQL import (
     list_graphql_apis,
@@ -100,7 +102,6 @@ from ._warehouses import (
     delete_warehouse,
     get_warehouse_columns,
     get_warehouse_tables,
-    get_warehouse_connection_string,
 )
 from ._data_pipelines import (
     list_data_pipelines,
@@ -351,6 +352,9 @@ from ._user_delegation_key import (
 from ._data_access_security import (
     list_data_access_roles,
 )
+from ._get_connection_string import (
+    get_connection_string,
+)
 
 __all__ = [
     "resolve_warehouse_id",
@@ -603,6 +607,8 @@ __all__ = [
     "get_item_definition",
     "get_workspace_network_communication_policy",
     "set_workspace_network_communication_policy",
-    "get_warehouse_connection_string",
+    "get_connection_string",
     "list_data_access_roles",
+    "bind_semantic_model_connection",
+    "unbind_semantic_model_connection",
 ]
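
Note: callers of the removed get_warehouse_connection_string need to move to the new unified helper. A minimal migration sketch (assuming a Fabric notebook with semantic-link-labs 0.12.3 installed; workspace and item names are hypothetical):

    import sempy_labs as labs

    # 0.12.2 and earlier (removed in 0.12.3):
    # conn = labs.get_warehouse_connection_string("MyWarehouse", workspace="MyWorkspace")

    # 0.12.3: one helper covers warehouses, lakehouses, and SQL endpoints
    conn = labs.get_connection_string(
        item="MyWarehouse",       # hypothetical warehouse name
        type="Warehouse",
        workspace="MyWorkspace",  # hypothetical workspace name
    )
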
sempy_labs/_a_lib_info.py CHANGED
@@ -1,2 +1,2 @@
 lib_name = "semanticlinklabs"
-lib_version = "0.12.2"
+lib_version = "0.12.3"
sempy_labs/_generate_semantic_model.py CHANGED
@@ -22,7 +22,7 @@ from uuid import UUID
 @log
 def create_blank_semantic_model(
     dataset: str,
-    compatibility_level: int = 1605,
+    compatibility_level: int = 1702,
     workspace: Optional[str | UUID] = None,
     overwrite: bool = True,
 ):
@@ -33,7 +33,7 @@ def create_blank_semantic_model(
     ----------
     dataset : str
         Name of the semantic model.
-    compatibility_level : int, default=1605
+    compatibility_level : int, default=1702
         The compatibility level of the semantic model.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
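
Note: a minimal sketch of the new default in use (hypothetical names; assumes the function remains exported at package level):

    import sempy_labs as labs

    # Creates an empty model at the new default compatibility level (1702)
    labs.create_blank_semantic_model(dataset="MyModel", workspace="MyWorkspace")

    # Pin the previous default explicitly if downstream tooling requires it
    labs.create_blank_semantic_model(
        dataset="MyLegacyModel", workspace="MyWorkspace", compatibility_level=1605
    )
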
sempy_labs/_get_connection_string.py ADDED
@@ -0,0 +1,84 @@
+from sempy_labs._helper_functions import (
+    resolve_item_id,
+    _base_api,
+    resolve_workspace_id,
+)
+from typing import Optional, Literal
+import sempy_labs._icons as icons
+from uuid import UUID
+from sempy._utils._log import log
+
+
+@log
+def get_connection_string(
+    item: str | UUID,
+    type: Literal["Lakehouse", "Warehouse", "SQLEndpoint"],
+    workspace: Optional[str | UUID] = None,
+    guest_tenant_id: Optional[UUID] = None,
+    private_link_type: Optional[str] = None,
+) -> str:
+    """
+    Returns the SQL connection string of the specified item.
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item.
+    type : Literal['Lakehouse', 'Warehouse', 'SQLEndpoint']
+        The type of the item. Must be 'Lakehouse', 'Warehouse' or 'SQLEndpoint'.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    guest_tenant_id : uuid.UUID, default=None
+        The guest tenant ID if the end user's tenant is different from the item's tenant.
+    private_link_type : str, default=None
+        Indicates the type of private link this connection string uses. Must be 'Workspace', 'None', or left as None.
+
+    Returns
+    -------
+    str
+        The SQL connection string of the specified item.
+    """
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=item, type=type, workspace=workspace)
+
+    type_dict = {
+        "Warehouse": "warehouses",
+        "SQLEndpoint": "sqlEndpoints",
+    }
+    type_for_url = type_dict.get(type)
+
+    if type == "Lakehouse":
+        response = _base_api(
+            f"/v1/workspaces/{workspace_id}/lakehouses/{item_id}", client="fabric_sp"
+        ).json()
+        return (
+            response.get("properties", {})
+            .get("sqlEndpointProperties", {})
+            .get("connectionString", {})
+        )
+    if type in ["SQLEndpoint", "Warehouse"]:
+        url = f"/v1/workspaces/{workspace_id}/{type_for_url}/{item_id}/connectionString"
+    else:
+        raise ValueError(
+            f"{icons.red_dot} The type must be 'Lakehouse', 'Warehouse' or 'SQLEndpoint'."
+        )
+
+    if private_link_type is not None and private_link_type not in ["Workspace", "None"]:
+        raise ValueError(
+            f"{icons.red_dot} private_link_type must be 'Workspace' or 'None' or left as None."
+        )
+
+    if guest_tenant_id or private_link_type:
+        params = []
+        if guest_tenant_id:
+            params.append(f"guestTenantId={guest_tenant_id}")
+        if private_link_type:
+            params.append(f"privateLinkType={private_link_type}")
+        param_str = "?" + "&".join(params)
+        url += param_str
+
+    response = _base_api(request=url, client="fabric_sp")
+
+    return response.json().get("connectionString")
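
Note: a usage sketch for the optional query parameters (all names and IDs are hypothetical). Lakehouse connection strings are read from the item's sqlEndpointProperties, while warehouses and SQL endpoints call a dedicated connectionString endpoint, so the optional arguments surface as ?guestTenantId=...&privateLinkType=... on that request:

    import sempy_labs as labs

    conn = labs.get_connection_string(
        item="MyLakehouse",                                      # hypothetical SQL endpoint name
        type="SQLEndpoint",
        workspace="MyWorkspace",                                 # hypothetical
        guest_tenant_id="00000000-0000-0000-0000-000000000000",  # hypothetical
        private_link_type="Workspace",
    )
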
sempy_labs/_helper_functions.py CHANGED
@@ -549,6 +549,7 @@ def get_item_definition(
             definition["definition"]["parts"].append(
                 {"path": path, "payload": decoded_payload}
             )
+        return definition
     else:
         return result
 
sempy_labs/_semantic_models.py CHANGED
@@ -348,3 +348,121 @@ def list_semantic_model_datasources(
     df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
+
+
+@log
+def bind_semantic_model_connection(
+    dataset: str | UUID,
+    connection_id: UUID,
+    connectivity_type: str,
+    connection_type: str,
+    connection_path: str,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Binds a semantic model data source reference to a data connection.
+    This API can also be used to unbind data source references.
+
+    This is a wrapper function for the following API: `Items - Bind Semantic Model Connection <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    connection_id : uuid.UUID
+        The object ID of the connection.
+    connectivity_type : str
+        The connectivity type of the connection. Additional connectivity types may be added over time.
+    connection_type : str
+        The `type <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection?tabs=HTTP#connectivitytype>`_ of the connection.
+    connection_path : str
+        The path of the connection.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+        dataset=dataset, workspace=workspace_id
+    )
+
+    payload = {
+        "connectionBinding": {
+            "id": str(connection_id),
+            "connectivityType": connectivity_type,
+            "connectionDetails": {
+                "type": connection_type,
+                "path": connection_path,
+            },
+        }
+    }
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/bindConnection",
+        method="post",
+        client="fabric_sp",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} Connection '{connection_id}' has been bound to the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+    )
+
+
+@log
+def unbind_semantic_model_connection(
+    dataset: str | UUID,
+    connection_type: str,
+    connection_path: str,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Unbinds a semantic model data source reference from a data connection.
+
+    This is a wrapper function for the following API: `Items - Bind Semantic Model Connection <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    connection_type : str
+        The `type <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection?tabs=HTTP#connectivitytype>`_ of the connection.
+    connection_path : str
+        The path of the connection.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+        dataset=dataset, workspace=workspace_id
+    )
+
+    payload = {
+        "connectionBinding": {
+            "connectivityType": "None",
+            "connectionDetails": {
+                "type": connection_type,
+                "path": connection_path,
+            },
+        }
+    }
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/bindConnection",
+        method="post",
+        client="fabric_sp",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has been unbound from its connection."
+    )
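
Note: a usage sketch for the new binding helpers (all names, IDs, and connection details are hypothetical and must match what the Fabric REST API expects for your connection):

    import sempy_labs as labs

    # Bind a model's data source reference to an existing connection
    labs.bind_semantic_model_connection(
        dataset="MySemanticModel",                             # hypothetical
        connection_id="11111111-2222-3333-4444-555555555555",  # hypothetical
        connectivity_type="ShareableCloud",                    # illustrative value
        connection_type="SQL",                                 # illustrative value
        connection_path="myserver.database.windows.net;mydb",  # hypothetical
        workspace="MyWorkspace",
    )

    # Unbind: same REST endpoint, but connectivityType is sent as "None"
    labs.unbind_semantic_model_connection(
        dataset="MySemanticModel",
        connection_type="SQL",
        connection_path="myserver.database.windows.net;mydb",
        workspace="MyWorkspace",
    )
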
sempy_labs/_sql_endpoints.py CHANGED
@@ -66,7 +66,8 @@ def refresh_sql_endpoint_metadata(
     item: str | UUID,
     type: Literal["Lakehouse", "MirroredDatabase"],
     workspace: Optional[str | UUID] = None,
-    tables: dict[str, list[str]] = None,
+    timeout_unit: Literal["Seconds", "Minutes", "Hours", "Days"] = "Minutes",
+    timeout_value: int = 15,
 ) -> pd.DataFrame:
     """
     Refreshes the metadata of a SQL endpoint.
@@ -85,15 +86,10 @@ def refresh_sql_endpoint_metadata(
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    tables : dict[str, list[str]], default=None
-        A dictionary where the keys are schema names and the values are lists of table names.
-        If empty, all table metadata will be refreshed.
-
-        Example:
-        {
-            "dbo": ["DimDate", "DimGeography"],
-            "sls": ["FactSales", "FactBudget"],
-        }
+    timeout_unit : Literal['Seconds', 'Minutes', 'Hours', 'Days'], default='Minutes'
+        The unit of time for the request duration before timing out. Additional duration types may be added over time.
+    timeout_value : int, default=15
+        The number of time units in the request duration.
 
     Returns
     -------
@@ -132,14 +128,10 @@ def refresh_sql_endpoint_metadata(
     else:
         raise ValueError("Invalid type. Must be 'Lakehouse' or 'MirroredDatabase'.")
 
-    payload = {}
-    if tables:
-        payload = {
-            "tableDefinitions": [
-                {"schema": schema, "tableNames": tables}
-                for schema, tables in tables.items()
-            ]
-        }
+    payload = None
+    timeout_unit = timeout_unit.capitalize()
+    if timeout_unit != "Minutes" and timeout_value != 15:
+        payload = {"timeout": {"timeUnit": timeout_unit, "value": timeout_value}}
 
     result = _base_api(
         request=f"v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata",
@@ -195,10 +187,8 @@ def refresh_sql_endpoint_metadata(
         df = df[column_order]
 
         printout = f"{icons.green_dot} The metadata of the SQL endpoint for the '{item_name}' {type.lower()} within the '{workspace_name}' workspace has been refreshed"
-        if tables:
-            print(f"{printout} for the following tables: {tables}.")
-        else:
-            print(f"{printout} for all tables.")
+        print(f"{printout} for all tables.")
+        _update_dataframe_datatypes(df, columns)
     else:
         # If the target item has no tables to refresh the metadata for
         df = pd.DataFrame(columns=columns.keys())
@@ -206,6 +196,4 @@ def refresh_sql_endpoint_metadata(
             f"{icons.yellow_dot} The SQL endpoint '{item_name}' {type.lower()} within the '{workspace_name}' workspace has no tables to refresh..."
         )
 
-    _update_dataframe_datatypes(df, columns)
-
     return df
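
Note: a usage sketch for the reworked signature (hypothetical names; assumes the function remains exported at package level). As the condition above shows, the timeout payload is only sent when both arguments differ from their defaults ('Minutes', 15):

    import sempy_labs as labs

    # Refresh metadata for all tables, allowing up to 2 hours before timing out
    df = labs.refresh_sql_endpoint_metadata(
        item="MyLakehouse",       # hypothetical
        type="Lakehouse",
        workspace="MyWorkspace",  # hypothetical
        timeout_unit="Hours",
        timeout_value=2,
    )
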
sempy_labs/_warehouses.py CHANGED
@@ -1,5 +1,4 @@
-from ._helper_functions import (
-    resolve_item_id,
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
@@ -233,57 +232,3 @@ def get_warehouse_columns(
     )
 
     return df
-
-
-@log
-def get_warehouse_connection_string(
-    warehouse: str | UUID,
-    workspace: Optional[str | UUID] = None,
-    guest_tenant_id: Optional[UUID] = None,
-    private_link_type: Optional[str] = None,
-) -> str:
-    """
-    Returns the SQL connection string of the specified warehouse.
-
-    Parameters
-    ----------
-    warehouse : str | uuid.UUID
-        Name or ID of the Fabric warehouse.
-    workspace : str | uuid.UUID, default=None
-        The Fabric workspace name or ID.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-    guest_tenant_id : uuid.UUID, default=None
-        The guest tenant ID if the end user's tenant is different from the warehouse tenant.
-    private_link_type : str, default=None
-        Indicates the type of private link this connection string uses. Must be either 'Workspace' or 'None' or left as None.
-
-    Returns
-    -------
-    str
-        Returns the SQL connection string of the specified warehouse.
-    """
-    workspace_id = resolve_workspace_id(workspace)
-    warehouse_id = resolve_item_id(
-        item=warehouse, type="Warehouse", workspace=workspace
-    )
-
-    url = f"/v1/workspaces/{workspace_id}/warehouses/{warehouse_id}/connectionString"
-
-    if private_link_type is not None and private_link_type not in ["Workspace", "None"]:
-        raise ValueError(
-            f"{icons.red_dot} private_link_type must be 'Workspace' or 'None' or left as None."
-        )
-
-    if guest_tenant_id or private_link_type:
-        params = []
-        if guest_tenant_id:
-            params.append(f"guestTenantId={guest_tenant_id}")
-        if private_link_type:
-            params.append(f"privateLinkType={private_link_type}")
-        param_str = "?" + "&".join(params)
-        url += param_str
-
-    response = _base_api(request=url, client="fabric_sp")
-
-    return response.json().get("connectionString")
sempy_labs/graph/__init__.py CHANGED
@@ -22,6 +22,9 @@ from ._users import (
 from ._teams import (
     list_teams,
 )
+from ._sensitivity_labels import (
+    list_sensitivity_labels,
+)
 
 __all__ = [
     "list_groups",
@@ -42,4 +45,5 @@ __all__ = [
     "delete_group",
     "update_user",
     "update_group",
+    "list_sensitivity_labels",
 ]
sempy_labs/graph/_sensitivity_labels.py ADDED
@@ -0,0 +1,81 @@
+import pandas as pd
+from uuid import UUID
+from typing import Optional
+from sempy_labs._helper_functions import (
+    _base_api,
+    _create_dataframe,
+    _update_dataframe_datatypes,
+)
+from sempy._utils._log import log
+
+
+@log
+def list_sensitivity_labels(user: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Get a list of `sensitivity label <https://learn.microsoft.com/graph/api/resources/security-sensitivitylabel>`_ objects associated with a user or organization.
+
+    This is a wrapper function for the following API: `List sensitivityLabels <https://learn.microsoft.com/graph/api/security-informationprotection-list-sensitivitylabels>`_.
+
+    Service Principal Authentication is required (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    user : str | uuid.UUID, default=None
+        The user ID or user principal name. Defaults to None which returns the sensitivity labels of the organization.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of `sensitivity label <https://learn.microsoft.com/graph/api/resources/security-sensitivitylabel>`_ objects associated with a user or organization.
+    """
+    from sempy_labs.graph import resolve_user_id
+
+    url = "/security/informationProtection/sensitivityLabels"
+
+    if user is not None:
+        user_id = resolve_user_id(user=user)
+        url = f"users/{user_id}{url}"
+
+    result = _base_api(request=url, client="graph").json()
+
+    columns = {
+        "Sensitivity Label Id": "str",
+        "Sensitivity Label Name": "str",
+        "Description": "str",
+        "Color": "str",
+        "Sensitivity": "int",
+        "Tooltip": "str",
+        "Is Active": "bool",
+        "Is Appliable": "bool",
+        "Has Protection": "bool",
+        "Parent Sensitivity Label Id": "str",
+        "Parent Sensitivity Label Name": "str",
+    }
+    df = _create_dataframe(columns=columns)
+
+    rows = []
+    for item in result.get("value", []):
+        row = {
+            "Sensitivity Label Id": item.get("id"),
+            "Sensitivity Label Name": item.get("name"),
+            "Description": item.get("description"),
+            "Color": item.get("color"),
+            "Sensitivity": item.get("sensitivity"),
+            "Tooltip": item.get("tooltip"),
+            "Is Active": item.get("isActive"),
+            "Is Appliable": item.get("isAppliable"),
+            "Has Protection": item.get("hasProtection"),
+            "Parent Sensitivity Label Id": (
+                item.get("parent", {}).get("id") if item.get("parent") else None
+            ),
+            "Parent Sensitivity Label Name": (
+                item.get("parent", {}).get("name") if item.get("parent") else None
+            ),
+        }
+        rows.append(row)
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
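
Note: a usage sketch (requires the service-principal authentication noted in the docstring; the user principal name is hypothetical):

    from sempy_labs.graph import list_sensitivity_labels

    # Organization-wide sensitivity labels
    df_org = list_sensitivity_labels()

    # Labels associated with a specific user
    df_user = list_sensitivity_labels(user="alice@contoso.com")  # hypothetical UPN
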
sempy_labs/tom/_model.py CHANGED
@@ -12,7 +12,6 @@ from sempy_labs._helper_functions import (
     _make_list_unique,
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,
-    _base_api,
     resolve_workspace_id,
     resolve_item_id,
     resolve_lakehouse_id,
@@ -4670,9 +4669,7 @@ class TOMWrapper:
             json=payload,
         )
         if response.status_code != 200:
-            raise FabricHTTPException(
-                f"Failed to retrieve descriptions: {response.text}"
-            )
+            raise FabricHTTPException(response)
 
         for item in response.json().get("modelItems", []):
             ms_name = item["urn"]
@@ -5109,7 +5106,9 @@ class TOMWrapper:
         """
         import Microsoft.AnalysisServices.Tabular as TOM
 
-        p = next(p for p in self.model.Tables[table_name].Partitions)
+        t = self.model.Tables[table_name]
+
+        p = next(p for p in t.Partitions)
         if p.Mode != TOM.ModeType.DirectLake:
             print(f"{icons.info} The '{table_name}' table is not in Direct Lake mode.")
             return
@@ -5119,9 +5118,7 @@ class TOMWrapper:
         partition_schema = schema or p.Source.SchemaName
 
         # Update name of the Direct Lake partition (will be removed later)
-        self.model.Tables[table_name].Partitions[
-            partition_name
-        ].Name = f"{partition_name}_remove"
+        t.Partitions[partition_name].Name = f"{partition_name}_remove"
 
         source_workspace_id = resolve_workspace_id(workspace=source_workspace)
         if source_type == "Lakehouse":
@@ -5133,21 +5130,41 @@ class TOMWrapper:
             item=source, type=source_type, workspace=source_workspace_id
         )
 
+        column_pairs = []
+        m_filter = None
+        for c in t.Columns:
+            if c.Type == TOM.ColumnType.Data:
+                if c.Name != c.SourceColumn:
+                    column_pairs.append((c.SourceColumn, c.Name))
+
+        if column_pairs:
+            m_filter = (
+                f'#"Renamed Columns" = Table.RenameColumns(ToDelta, {{'
+                + ", ".join([f'{{"{old}", "{new}"}}' for old, new in column_pairs])
+                + "})"
+            )
+
         def _generate_m_expression(
-            workspace_id, artifact_id, artifact_type, table_name, schema_name
+            workspace_id, artifact_id, artifact_type, table_name, schema_name, m_filter
         ):
             """
-            Generates the M expression for the import partition.
+            Generates the M expression for the import partition. Adds a rename step if any columns have been renamed in the model.
             """
 
            full_table_name = (
                 f"{schema_name}/{table_name}" if schema_name else table_name
             )
 
-            return f"""let\n\tSource = AzureStorage.DataLake("https://onelake.dfs.fabric.microsoft.com/{workspace_id}/{artifact_id}", [HierarchicalNavigation=true]),
+            code = f"""let\n\tSource = AzureStorage.DataLake("https://onelake.dfs.fabric.microsoft.com/{workspace_id}/{artifact_id}", [HierarchicalNavigation=true]),
             Tables = Source{{[Name = "Tables"]}}[Content],
             ExpressionTable = Tables{{[Name = "{full_table_name}"]}}[Content],
-            ToDelta = DeltaLake.Table(ExpressionTable)\nin\n\tToDelta"""
+            ToDelta = DeltaLake.Table(ExpressionTable)"""
+            if m_filter is None:
+                code += "\n in\n\tToDelta"
+            else:
+                code += f',\n\t {m_filter} \n in\n\t#"Renamed Columns"'
+
+            return code
 
         m_expression = _generate_m_expression(
@@ -5155,6 +5172,7 @@ class TOMWrapper:
             source_type,
             partition_entity_name,
             partition_schema,
+            m_filter,
         )
 
         # Add the import partition
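
Note: for illustration, with a single renamed column (a hypothetical rename of "CustomerKey" to "Customer Key" on table "dbo/DimCustomer"), the helper above would generate an M expression along these lines (whitespace normalized):

    let
        Source = AzureStorage.DataLake("https://onelake.dfs.fabric.microsoft.com/<workspace_id>/<artifact_id>", [HierarchicalNavigation=true]),
        Tables = Source{[Name = "Tables"]}[Content],
        ExpressionTable = Tables{[Name = "dbo/DimCustomer"]}[Content],
        ToDelta = DeltaLake.Table(ExpressionTable),
        #"Renamed Columns" = Table.RenameColumns(ToDelta, {{"CustomerKey", "Customer Key"}})
    in
        #"Renamed Columns"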