semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


semantic_link_labs-0.8.9.dist-info/METADATA → semantic_link_labs-0.8.10.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: semantic-link-labs
-Version: 0.8.9
+Version: 0.8.10
 Summary: Semantic Link Labs for Microsoft Fabric
 Author: Microsoft Corporation
 License: MIT License
@@ -27,7 +27,7 @@ Requires-Dist: pytest>=8.2.1; extra == "test"
 # Semantic Link Labs
 
 [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
-[![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.8.9&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+[![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.8.10&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
 [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
 [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)
 
@@ -64,6 +64,7 @@ Check out the video below for an introduction to Semantic Link, Semantic Link La
 * [Dynamically generate a Direct Lake semantic model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.generate_direct_lake_semantic_model)
 * [Check why a Direct Lake semantic model would fallback to DirectQuery](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.check_fallback_reason)
 * [View a measure dependency tree](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.measure_dependency_tree)
+* [View unique columns touched in a single (or multiple) DAX query(ies)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_dax_query_dependencies)
 * Reports
 * [Report Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.run_report_bpa)
 * [View report metadata](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Report%20Analysis.ipynb)
@@ -141,6 +142,7 @@ An even better way to ensure the semantic-link-labs library is available in your
 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
 
 ## Version History
+* [0.8.10](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.10) (December 16, 2024)
 * [0.8.9](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.9) (December 4, 2024)
 * [0.8.8](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.8) (November 28, 2024)
 * [0.8.7](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.7) (November 27, 2024)
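The notable README addition in this release is the new `get_dax_query_dependencies` helper. A minimal sketch of how it might be invoked from a Fabric notebook follows; the `dataset` and `dax_string` parameter names are assumptions based on the library's conventions — only the function name comes from this diff.

```python
import sempy_labs as labs

# Hypothetical call -- parameter names are assumed, not taken from this diff.
deps = labs.get_dax_query_dependencies(
    dataset="Sales Model",  # placeholder semantic model name
    dax_string="EVALUATE VALUES('Product'[Color])",  # one or more DAX queries
)
print(deps)  # expected: one row per unique model column touched by the query
```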
semantic_link_labs-0.8.9.dist-info/RECORD → semantic_link_labs-0.8.10.dist-info/RECORD RENAMED
@@ -1,12 +1,12 @@
-sempy_labs/__init__.py,sha256=Xi71QoSKFo8eexzMFaZordi1NE7TtqhiyXRZfltMHR4,12852
+sempy_labs/__init__.py,sha256=WyNDrg52pNpAj0s3H7o1vWNYnIpXZrXrKJVYVeC47GQ,13090
 sempy_labs/_ai.py,sha256=CzsNw6Wpd2B5Rd0RcY250-_p0L-0gFoMNLEc_KmrobU,16177
-sempy_labs/_authentication.py,sha256=LWkl0yoz63tYe5u9DL1js7g88vmkVbcTIqrsaxDHMT0,3851
+sempy_labs/_authentication.py,sha256=a4sPN8IAf7w-Jftm3PvtF62HqWLkCD69tXgze_G59Zg,4642
 sempy_labs/_capacities.py,sha256=HWX1ivlWpyS7Ea_ny-39kUAQYFGMzo42kWMGdJMINos,25466
 sempy_labs/_capacity_migration.py,sha256=PCIodWXas9v7q93hqD2m8EanJHPJzke52jbCWOfnLZk,27764
 sempy_labs/_clear_cache.py,sha256=ttHsXXR6SRRw4eC0cS8I5h38UbWU9YJii1y-uR9R3KM,12493
 sempy_labs/_connections.py,sha256=-Z3rfLGlUKx5iIGmvwWOICIVZ478ydwvCjxusoJb1RI,17647
 sempy_labs/_data_pipelines.py,sha256=WdZjTELNuN_7suWj6NrZUxGnMTzAgIxFw8V6YMb8ags,5644
-sempy_labs/_dataflows.py,sha256=Wpx5jmTDewTkL2bsVZ5r_PTD0etIBdKXVigTjUF8HAg,8252
+sempy_labs/_dataflows.py,sha256=D-OuuoUhSrGEhW1mAMBeHULfhULXmmTqIoOdV-se3vs,8250
 sempy_labs/_dax.py,sha256=5lY0p9bS0O2OWViaqTw_9K0WfZsyBW3gK4rIv1bqjjE,9411
 sempy_labs/_deployment_pipelines.py,sha256=WBBQM85-3-MkXb5OmRPF6U83xLyhKSlYUyhRlkvcl4k,6027
 sempy_labs/_documentation.py,sha256=yVA8VPEzx_fmljtcvSxtB7-BeupYsfdMXXjp6Fpnyo8,5007
@@ -14,28 +14,28 @@ sempy_labs/_environments.py,sha256=avpLSfZyyQFdEDIIxWv2THLjPZwbs9XGXT7ob9l_-ao,5
 sempy_labs/_eventhouses.py,sha256=vgIFQkXcBPC4SnlrBzT7SRmembExxkm6n0gdKnc7Hlk,4036
 sempy_labs/_eventstreams.py,sha256=Rht0eWoZbYF6XKtE3AOUvGgA21smxC9gdN599z-jY3s,4061
 sempy_labs/_external_data_shares.py,sha256=lUsKy1mexNSmhyFwxSeE2jZKNdDAWDP6iC6UPTXCvyU,6799
-sempy_labs/_gateways.py,sha256=CzTS95tLpG1bIG-0XkJWniNQEyNUtRq2go4QJAvjMr4,14617
-sempy_labs/_generate_semantic_model.py,sha256=ed5Wivz8ZqdKghpzkJD34onByttCaKVzwsjkndK-Qk4,17061
-sempy_labs/_git.py,sha256=C9TYKG4g8q35R1S7iTZyjMtRis033uUWkYJe_a0f3u0,13540
-sempy_labs/_helper_functions.py,sha256=Dk1cYfctxn_0RJHRKuqelcpaNJxyhoBZwWbvb0CGumU,38051
+sempy_labs/_gateways.py,sha256=j7REuoG9vGvPcJdII-gOJagprPJctsx3bi-ekUe5q6w,16228
+sempy_labs/_generate_semantic_model.py,sha256=YsPULWDkh2VE-jU3F3jIn0AmZPl3dq03sG-QfuHG2GI,16757
+sempy_labs/_git.py,sha256=gvFR6kCZbegoO_j_hubt-fxuaxRl1KsVldtQNJfnA9U,13870
+sempy_labs/_helper_functions.py,sha256=FtkzTlIJudgGRxrZzjfyon5ehuD8w8KR6sBV74R0XtY,39124
 sempy_labs/_icons.py,sha256=ez2dx_LCti71S_-eB6WYQ-kOMyiBL8ZJN12-ev5dcmA,3579
 sempy_labs/_kql_databases.py,sha256=oNX9oKnXu5SLkzl4kTMQguh4In-i-0Forcoy1phOe1s,4621
 sempy_labs/_kql_querysets.py,sha256=A-79LiLBKxlADPTuSK9ipy_LjXKcsJZwQHknUXFpVl0,4157
-sempy_labs/_list_functions.py,sha256=6EKlqwqT0tru1uV7qXCew5lMKgJrGqEnOHu_3q-z7RM,55503
+sempy_labs/_list_functions.py,sha256=_5mxBwfTh-BUFWK2lXNPXSpZO0OtP7FlcWaSxwD-Mgs,60939
 sempy_labs/_managed_private_endpoints.py,sha256=bCuC9V4yMFBw1BNlsoxARdIEMPAySW-ljHrhvuziQfw,6179
 sempy_labs/_mirrored_databases.py,sha256=5_5phu50KIvhHNQJ-RQAxd92W4D7GUVMyjAnOb7ZY3Q,14360
 sempy_labs/_mirrored_warehouses.py,sha256=t2fBH5L0UzNahDB4lATDLvmCqYTU-V93_ZVLb5ZISSg,1764
 sempy_labs/_ml_experiments.py,sha256=UVh3cwNvpY-otCBIaKW-sgtzyjwAuu8qJDLhZGBHToE,4196
 sempy_labs/_ml_models.py,sha256=phYLySjN7MO2YYfq7ZQKMS6w18G6L1-7DdNWB4fcLjQ,4044
 sempy_labs/_model_auto_build.py,sha256=PTQo3dufzLSFcQ5shFkmBWAVSdP7cTJgpUclrcXyNbg,5105
-sempy_labs/_model_bpa.py,sha256=Tyj7JV4L0rCmeWL896S58SOA8SHQKNxQ9QnmqunKTfM,21764
-sempy_labs/_model_bpa_bulk.py,sha256=wJe8KEMvbU3T--7vOBy3nXE2L6WZEolwrRH4WAxgsAM,16030
+sempy_labs/_model_bpa.py,sha256=nhHAoq9RtOe0lAKRIg9Hr9TBMKxxbGOSCCCs1I8oy1s,21778
+sempy_labs/_model_bpa_bulk.py,sha256=jU-kaeUeE1Slz5HEh3lSbnILzj2tfzMwvaOqOQG16Wg,16027
 sempy_labs/_model_bpa_rules.py,sha256=96_GkXQGhON-_uyUATgUibk4W9y7e9wl1QciUr96gIQ,45544
-sempy_labs/_model_dependencies.py,sha256=AclITMJLRI33DvAqMJrgbnUhCubj7qpsHh1_lCWqcrg,11840
-sempy_labs/_notebooks.py,sha256=EUYVeRJrCL9IllQevwRxzkCUU-rzX6KEEH7x7mBYUqc,7422
+sempy_labs/_model_dependencies.py,sha256=PL_-ozj3d2L03xR1S-4b-rGhghKbed3QY47x6i5BnfI,12070
+sempy_labs/_notebooks.py,sha256=DTz0byyNMP-JIEn4h85SJ8zMXNrkoChoeV-QE_TvPhE,8280
 sempy_labs/_one_lake_integration.py,sha256=eIuLxlw8eXfUH2avKhsyLmXZbTllSwGsz2j_HMAikpQ,6234
 sempy_labs/_query_scale_out.py,sha256=xoHnuDUgPYsg-NlUplB9ieb0bClcBQeG4veJNo_4TNw,15708
-sempy_labs/_refresh_semantic_model.py,sha256=R781zVHTnoLw5mzzcSc39OPBmKpzoBxsOa1KhvqyEgw,17131
+sempy_labs/_refresh_semantic_model.py,sha256=LSfwuViimX6TFq1KlQCMHue7ylzBwaBSrcPzJuvVz2M,17465
 sempy_labs/_spark.py,sha256=RIJt9b_l5Sp5XrebhvRD0DEBKDTQdA8Rh7fByV27ngQ,20109
 sempy_labs/_sql.py,sha256=KttKi95iGxTT8UA1QOpT9ygAdwCfHHlcQSQ5d9gml0E,5358
 sempy_labs/_translations.py,sha256=CVRd_yJ1pjUzxY_6H8tSCLh67bHhxRyS7DICY20Lqlc,16112
@@ -82,17 +82,17 @@ sempy_labs/_bpa_translation/_model/_translations_tr-TR.po,sha256=NdW-X4E0QmeLKM0
 sempy_labs/_bpa_translation/_model/_translations_uk-UA.po,sha256=3NsFN8hoor_5L6738FjpJ8o4spwp8FNFGbVItHD-_ec,43500
 sempy_labs/_bpa_translation/_model/_translations_zh-CN.po,sha256=ipMbnet7ZI5mZoC8KonYKVwGmFLHFB_9KIDOoBgSNfo,26815
 sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po,sha256=5v6tVKGruqneAeMoa6F3tyg_JBL8qOpqOJofWpq2W3U,31518
-sempy_labs/admin/__init__.py,sha256=h5kLtAZMzqRRBbLX-l3YGzSZhUu_tMizOHRxeWivNEU,1859
-sempy_labs/admin/_basic_functions.py,sha256=Wip4Q4TTFj5SuBpDDcnBaS4Iaqu_pnxqnmMQBo2y34c,35613
+sempy_labs/admin/__init__.py,sha256=YYOwKRfRr6alAez4BImlPcX9bQExATrb856BRq7a3O8,1945
+sempy_labs/admin/_basic_functions.py,sha256=5EEHWDhIFEt94-9yKNecIAMN-KzdBg1uqfdZNoXmnwA,37068
 sempy_labs/admin/_domains.py,sha256=5mv2SzIZCibvHwd4tgm-Lelj0zi66A2KKzQjDQgT9ms,12385
 sempy_labs/admin/_external_data_share.py,sha256=ITsPDgRDfgvZn1cjzpUWyR6lpnoOP0-gJVxjRA3Mp8w,3489
 sempy_labs/admin/_git.py,sha256=OY2F5ICKBXrB1HhlYDWdXQPnhTwSrMfWzEa2xcutClc,2181
 sempy_labs/admin/_items.py,sha256=LqjBYWL3NZCX8f0H-zzjOzy9zlBC7XR4LiknJv_JLT0,8428
-sempy_labs/admin/_scanner.py,sha256=Nnhi3DNv9LA6yieDcg4F5ykepPXN4UGXzTWMMDHiCq4,4494
+sempy_labs/admin/_scanner.py,sha256=OkP2Nc_s-DkYEmfLqiMIf8i3EhVyHfvnT1bPSSnVVss,4476
 sempy_labs/directlake/__init__.py,sha256=etaj-3wqe5t93mu74tGYjEOQ6gtHWUogidOygiVvlq8,2131
 sempy_labs/directlake/_directlake_schema_compare.py,sha256=ocHFU6E6HSKgcNLywGM0dx0ie9AXYwk-E7o7EYcqiN4,4422
 sempy_labs/directlake/_directlake_schema_sync.py,sha256=fhh6Xjd42HjI5x_Ejwq1N4qqnXQsKpXmyPcYl7cNG6A,4151
-sempy_labs/directlake/_dl_helper.py,sha256=lnvq99QCtxba0ViAEfL_fjhvlV1QMU-xDF0cU9s6ih4,9413
+sempy_labs/directlake/_dl_helper.py,sha256=tG3b0-BJbk-Kwk2B0fyPwoaMgTXS920L61Qz55COex8,9647
 sempy_labs/directlake/_generate_shared_expression.py,sha256=EauK1M4fabCZjsHYAWxEYaVJKqxJ99nZQaN2pKdd1lg,3077
 sempy_labs/directlake/_get_directlake_lakehouse.py,sha256=sovI4ds2SEgkp4Fi465jtJ4seRvQxdYgcixRDvsUwNM,2321
 sempy_labs/directlake/_get_shared_expression.py,sha256=rJ2twFSAMpjdjXl4zkqei_qxzxmGn5DxiDW2KxLcUog,1081
@@ -157,9 +157,9 @@ sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visua
 sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json,sha256=wBVuNc8S2NaUA0FC708w6stmR2djNZp8nAsHMqesgsc,293
 sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json,sha256=mlY6t9OlSe-Y6_QmXJpS1vggU6Y3FjISUKECL8FVSg8,931
 sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
-sempy_labs/tom/_model.py,sha256=tAuj4f_7jPqGtnxq6zAwAeBOvR_29J8cSVm2ilGxIzo,172056
-semantic_link_labs-0.8.9.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
-semantic_link_labs-0.8.9.dist-info/METADATA,sha256=syErH8junhZMiSn2M04yvVgU9VTVbIIdN5LX7gUqzmQ,20910
-semantic_link_labs-0.8.9.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-semantic_link_labs-0.8.9.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
-semantic_link_labs-0.8.9.dist-info/RECORD,,
+sempy_labs/tom/_model.py,sha256=rBMI9BTSuTEH1MnJsUIVtsc45x7EOQ2fAxcYkwYgfZw,173575
+semantic_link_labs-0.8.10.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+semantic_link_labs-0.8.10.dist-info/METADATA,sha256=eITmLRcqLr0Yuitj1Q1LXmWYDH5AA-fbpwWUDGWQuO4,21196
+semantic_link_labs-0.8.10.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+semantic_link_labs-0.8.10.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+semantic_link_labs-0.8.10.dist-info/RECORD,,
sempy_labs/__init__.py CHANGED
@@ -8,6 +8,7 @@ from sempy_labs._gateways import (
     create_vnet_gateway,
     update_vnet_gateway,
     update_on_premises_gateway,
+    bind_semantic_model_to_gateway,
 )
 
 from sempy_labs._authentication import (
@@ -205,6 +206,8 @@ from sempy_labs._list_functions import (
     list_lakehouses,
     list_sql_endpoints,
     update_item,
+    list_server_properties,
+    list_semantic_model_errors,
 )
 from sempy_labs._helper_functions import (
     convert_to_friendly_case,
@@ -230,6 +233,7 @@ from sempy_labs._helper_functions import (
     get_capacity_id,
     get_capacity_name,
     resolve_capacity_name,
+    get_tenant_id,
 )
 from sempy_labs._model_bpa_bulk import (
     run_model_bpa_bulk,
@@ -458,4 +462,8 @@ __all__ = [
     "update_vnet_gateway",
     "update_on_premises_gateway",
     "get_semantic_model_definition",
+    "get_tenant_id",
+    "list_server_properties",
+    "bind_semantic_model_to_gateway",
+    "list_semantic_model_errors",
 ]
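Taken together, these `__init__.py` changes promote four functions to the public API. The attribute references below are grounded in the import blocks above; no call signatures beyond what later hunks show should be inferred from this sketch.

```python
import sempy_labs as labs

# New public names in 0.8.10, per the imports above:
labs.get_tenant_id                    # from sempy_labs._helper_functions
labs.list_server_properties           # from sempy_labs._list_functions
labs.list_semantic_model_errors       # from sempy_labs._list_functions
labs.bind_semantic_model_to_gateway   # from sempy_labs._gateways
```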
sempy_labs/_authentication.py CHANGED
@@ -91,11 +91,13 @@ class ServicePrincipalTokenProvider(TokenProvider):
 
         return cls(credential)
 
-    def __call__(self, audience: Literal["pbi", "storage"] = "pbi") -> str:
+    def __call__(
+        self, audience: Literal["pbi", "storage", "azure", "graph"] = "pbi"
+    ) -> str:
         """
         Parameters
         ----------
-        audience : Literal["pbi", "storage"] = "pbi") -> str
+        audience : Literal["pbi", "storage", "azure", "graph"] = "pbi") -> str
             Literal if it's for PBI/Fabric API call or OneLake/Storage Account call.
         """
         if audience == "pbi":
@@ -104,5 +106,32 @@ class ServicePrincipalTokenProvider(TokenProvider):
             ).token
         elif audience == "storage":
             return self.credential.get_token("https://storage.azure.com/.default").token
+        elif audience == "azure":
+            return self.credential.get_token(
+                "https://management.azure.com/.default"
+            ).token
+        elif audience == "graph":
+            return self.credential.get_token(
+                "https://graph.microsoft.com/.default"
+            ).token
         else:
             raise NotImplementedError
+
+
+def _get_headers(
+    token_provider: str, audience: Literal["pbi", "storage", "azure", "graph"] = "azure"
+):
+    """
+    Generates headers for an API request.
+    """
+
+    token = token_provider(audience=audience)
+
+    headers = {"Authorization": f"Bearer {token}"}
+
+    if audience == "graph":
+        headers["ConsistencyLevel"] = "eventual"
+    else:
+        headers["Content-Type"] = "application/json"
+
+    return headers
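The widened `__call__` lets one service-principal credential mint tokens for Azure Resource Manager and Microsoft Graph in addition to Power BI/Fabric and storage, and the new `_get_headers` helper pairs the token with the right header set (Graph requests get `ConsistencyLevel: eventual` instead of `Content-Type`). A usage sketch, assuming the provider is built with a `from_azure_key_vault` factory that this hunk only hints at via `return cls(credential)`:

```python
from sempy_labs import ServicePrincipalTokenProvider

# Assumed factory and argument names -- the constructor is not shown in this diff.
provider = ServicePrincipalTokenProvider.from_azure_key_vault(
    key_vault_uri="https://myvault.vault.azure.net/",
    key_vault_tenant_id="kv-secret-with-tenant-id",
    key_vault_client_id="kv-secret-with-client-id",
    key_vault_client_secret="kv-secret-with-client-secret",
)

provider()                   # default audience="pbi"
provider(audience="storage")
provider(audience="azure")   # new: https://management.azure.com/.default
provider(audience="graph")   # new: https://graph.microsoft.com/.default
```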
sempy_labs/_dataflows.py CHANGED
@@ -42,7 +42,7 @@ def list_dataflows(workspace: Optional[str] = None):
             "Dataflow Id": v.get("objectId"),
             "Dataflow Name": v.get("name"),
             "Configured By": v.get("configuredBy"),
-            "Users": [v.get("users")],
+            "Users": v.get("users"),
             "Generation": v.get("generation"),
         }
         df = pd.concat(
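The one-character fix above removes a double wrap: the REST payload's `users` field is already a list, so enclosing it in brackets produced a one-element list of lists in every row. Illustrated standalone (assuming the API returns a list of user principals):

```python
# Standalone illustration of the before/after behavior.
v = {"users": ["alice@contoso.com", "bob@contoso.com"]}

before = [v.get("users")]  # [["alice@contoso.com", "bob@contoso.com"]] -- nested
after = v.get("users")     # ["alice@contoso.com", "bob@contoso.com"]
```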
sempy_labs/_gateways.py CHANGED
@@ -6,6 +6,8 @@ from sempy_labs._helper_functions import (
     pagination,
     _is_valid_uuid,
     resolve_capacity_id,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from uuid import UUID
 import sempy_labs._icons as icons
@@ -437,3 +439,47 @@ def update_vnet_gateway(
         raise FabricHTTPException(response)
 
     print(f"{icons.green_dot} The '{gateway}' has been updated accordingly.")
+
+
+def bind_semantic_model_to_gateway(
+    dataset: str | UUID, gateway: str | UUID, workspace: Optional[str | UUID] = None
+):
+    """
+    Binds the specified dataset from the specified workspace to the specified gateway.
+
+    This is a wrapper function for the following API: `Datasets - Bind To Gateway In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/bind-to-gateway-in-group>`_.
+
+    Parameters
+    ----------
+    dataset : str | UUID
+        The name or ID of the semantic model.
+    gateway : str | UUID
+        The name or ID of the gateway.
+    workspace : str | UUID, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+        dataset, workspace=workspace_id
+    )
+
+    gateway_id = _resolve_gateway_id(gateway)
+    payload = {
+        "gatewayObjectId": gateway_id,
+    }
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/Default.BindToGateway",
+        json=payload,
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has been binded to the '{gateway_id}' gateway."
+    )
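Usage follows directly from the signature and docstring above; the argument values below are placeholders:

```python
import sempy_labs as labs

labs.bind_semantic_model_to_gateway(
    dataset="Sales Model",      # name or UUID, per the docstring
    gateway="Contoso Gateway",  # name or UUID
    workspace="Analytics",      # omit to default to the notebook's workspace
)
```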
sempy_labs/_generate_semantic_model.py CHANGED
@@ -203,14 +203,7 @@ def update_semantic_model_from_bim(
     """
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    dfD = fabric.list_datasets(workspace=workspace, mode="rest")
-    dfD_filt = dfD[dfD["Dataset Name"] == dataset]
-    if len(dfD_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace does not exist."
-        )
-    dataset_id = dfD_filt["Dataset Id"].iloc[0]
+    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
 
     client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
@@ -303,12 +296,11 @@
     bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace)
 
     # Create the semantic model if the model does not exist
-    if len(dfD_filt) == 0:
+    if dfD_filt.empty:
         create_semantic_model_from_bim(
             dataset=target_dataset,
             bim_file=bim,
             workspace=target_workspace,
-            overwrite=overwrite,
        )
     # Update the semantic model if the model exists
     else:
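With `overwrite` no longer forwarded, `deploy_semantic_model` now simply creates the target model when `dfD_filt` is empty and updates it otherwise. A sketch using the source/target parameters visible in this hunk (workspace and model names are placeholders):

```python
import sempy_labs as labs

labs.deploy_semantic_model(
    source_dataset="Sales Model",
    source_workspace="Dev",
    target_dataset="Sales Model",
    target_workspace="Prod",
)
```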
sempy_labs/_git.py CHANGED
@@ -277,43 +277,48 @@ def commit_to_git(
     workspace, workspace_id = resolve_workspace_name_and_id(workspace)
 
     gs = get_git_status(workspace=workspace)
-    workspace_head = gs["Workspace Head"].iloc[0]
+    if not gs.empty:
+        workspace_head = gs["Workspace Head"].iloc[0]
 
-    if item_ids is None:
-        commit_mode = "All"
-    else:
-        commit_mode = "Selective"
+        if item_ids is None:
+            commit_mode = "All"
+        else:
+            commit_mode = "Selective"
 
-    if isinstance(item_ids, str):
-        item_ids = [item_ids]
+        if isinstance(item_ids, str):
+            item_ids = [item_ids]
 
-    request_body = {
-        "mode": commit_mode,
-        "workspaceHead": workspace_head,
-        "comment": comment,
-    }
+        request_body = {
+            "mode": commit_mode,
+            "workspaceHead": workspace_head,
+            "comment": comment,
+        }
 
-    if item_ids is not None:
-        request_body["items"] = [{"objectId": item_id} for item_id in item_ids]
+        if item_ids is not None:
+            request_body["items"] = [{"objectId": item_id} for item_id in item_ids]
 
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/git/commitToGit",
-        json=request_body,
-    )
+        client = fabric.FabricRestClient()
+        response = client.post(
+            f"/v1/workspaces/{workspace_id}/git/commitToGit",
+            json=request_body,
+        )
 
-    if response.status_code not in [200, 202]:
-        raise FabricHTTPException(response)
+        if response.status_code not in [200, 202]:
+            raise FabricHTTPException(response)
 
-    lro(client, response)
+        lro(client=client, response=response, return_status_code=True)
 
-    if commit_mode == "All":
-        print(
-            f"{icons.green_dot} All items within the '{workspace}' workspace have been committed to Git."
-        )
+        if commit_mode == "All":
+            print(
+                f"{icons.green_dot} All items within the '{workspace}' workspace have been committed to Git."
+            )
+        else:
+            print(
+                f"{icons.green_dot} The {item_ids} items within the '{workspace}' workspace have been committed to Git."
+            )
     else:
         print(
-            f"{icons.green_dot} The {item_ids} items ithin the '{workspace}' workspace have been committed to Git."
+            f"{icons.info} Git already up to date: no modified items found within the '{workspace}' workspace."
         )
 
 
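The whole body of `commit_to_git` now sits under `if not gs.empty:`, so a workspace with nothing to commit prints an "already up to date" message instead of failing on `gs["Workspace Head"].iloc[0]`. A sketch of both commit modes, with keyword names taken from the function body above and placeholder values:

```python
import sempy_labs as labs

# Mode "All": commit every modified item in the workspace.
labs.commit_to_git(comment="Nightly sync", workspace="Analytics")

# Mode "Selective": commit only the listed item IDs.
labs.commit_to_git(
    comment="Ship the updated report",
    item_ids="11111111-2222-3333-4444-555555555555",  # placeholder item ID
    workspace="Analytics",
)
```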
sempy_labs/_helper_functions.py CHANGED
@@ -160,14 +160,34 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str] = None) -> str
     return obj
 
 
-def resolve_dataset_id(dataset: str, workspace: Optional[str] = None) -> UUID:
+def resolve_dataset_name_and_id(
+    dataset: str | UUID, workspace: Optional[str] = None
+) -> Tuple[str, UUID]:
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    if _is_valid_uuid(dataset):
+        dataset_id = dataset
+        dataset_name = fabric.resolve_item_name(
+            item_id=dataset_id, type="SemanticModel", workspace=workspace_id
+        )
+    else:
+        dataset_name = dataset
+        dataset_id = fabric.resolve_item_id(
+            item_name=dataset, type="SemanticModel", workspace=workspace_id
+        )
+
+    return dataset_name, dataset_id
+
+
+def resolve_dataset_id(dataset: str | UUID, workspace: Optional[str] = None) -> UUID:
     """
     Obtains the ID of the semantic model.
 
     Parameters
     ----------
-    dataset : str
-        The name of the semantic model.
+    dataset : str | UUID
+        The name or ID of the semantic model.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -179,15 +199,14 @@ def resolve_dataset_id(dataset: str, workspace: Optional[str] = None) -> UUID:
         The ID of the semantic model.
     """
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-
-    obj = fabric.resolve_item_id(
-        item_name=dataset, type="SemanticModel", workspace=workspace
-    )
+    if _is_valid_uuid(dataset):
+        dataset_id = dataset
+    else:
+        dataset_id = fabric.resolve_item_id(
+            item_name=dataset, type="SemanticModel", workspace=workspace
+        )
 
-    return obj
+    return dataset_id
 
 
 def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None) -> str:
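Both resolvers now branch on `_is_valid_uuid`, so callers can pass either a model name or its UUID. The dispatch, reduced to a standalone sketch (the lambdas of real code are `fabric.resolve_item_name`/`fabric.resolve_item_id`):

```python
from uuid import UUID

def _is_uuid(value) -> bool:
    # Same test _is_valid_uuid performs: a parseable UUID means "treat as an ID".
    try:
        UUID(str(value))
        return True
    except ValueError:
        return False

dataset = "Sales Model"  # or e.g. "9b4f4c3a-8a43-4f7a-9b4f-4c3a8a434f7a"
if _is_uuid(dataset):
    print("treat as ID -> look up the name via fabric.resolve_item_name(item_id=...)")
else:
    print("treat as name -> look up the ID via fabric.resolve_item_id(item_name=...)")
```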
@@ -761,13 +780,19 @@ def get_capacity_id(workspace: Optional[str] = None) -> UUID:
         The capacity Id.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
-    filter_condition = urllib.parse.quote(workspace)
-    dfW = fabric.list_workspaces(filter=f"name eq '{filter_condition}'")
-    if len(dfW) == 0:
-        raise ValueError(f"{icons.red_dot} The '{workspace}' does not exist'.")
+    if workspace is None:
+        capacity_id = _get_x_id(name="trident.capacity.id")
+    else:
+
+        workspace = fabric.resolve_workspace_name(workspace)
+        filter_condition = urllib.parse.quote(workspace)
+        dfW = fabric.list_workspaces(filter=f"name eq '{filter_condition}'")
+        if len(dfW) == 0:
+            raise ValueError(f"{icons.red_dot} The '{workspace}' does not exist'.")
+
+        capacity_id = dfW["Capacity Id"].iloc[0]
 
-    return dfW["Capacity Id"].iloc[0]
+    return capacity_id
 
 
 def get_capacity_name(workspace: Optional[str] = None) -> str:
@@ -1167,20 +1192,20 @@ def _make_list_unique(my_list):
 
 def _get_partition_map(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
 
-    if workspace is None:
-        workspace = fabric.resolve_workspace_name()
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     partitions = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
        select [ID] AS [PartitionID], [TableID], [Name] AS [PartitionName] from $system.tmschema_partitions
        """,
    )
 
    tables = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
        dax_string="""
        select [ID] AS [TableID], [Name] AS [TableName] from $system.tmschema_tables
        """,
@@ -1352,3 +1377,15 @@ def _is_valid_uuid(
         return True
     except ValueError:
         return False
+
+
+def _get_fabric_context_setting(name: str):
+
+    from synapse.ml.internal_utils.session_utils import get_fabric_context
+
+    return get_fabric_context().get(name)
+
+
+def get_tenant_id():
+
+    _get_fabric_context_setting(name="trident.tenant.id")
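As shipped in this final hunk, `get_tenant_id` performs the context lookup without returning it, so the value has to be read via the private helper or the Fabric context directly. A sketch of that read, assuming a Microsoft Fabric Spark session where `synapse.ml.internal_utils` is importable:

```python
# Minimal sketch, assuming a Microsoft Fabric Spark session.
from synapse.ml.internal_utils.session_utils import get_fabric_context

tenant_id = get_fabric_context().get("trident.tenant.id")      # the key get_tenant_id looks up
capacity_id = get_fabric_context().get("trident.capacity.id")  # the key get_capacity_id now falls back to
print(tenant_id, capacity_id)
```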