semantic-link-labs 0.8.0__py3-none-any.whl → 0.8.2__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.



This version of semantic-link-labs has been flagged as potentially problematic.

Files changed (47)
  1. {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/METADATA +39 -7
  2. {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/RECORD +47 -37
  3. sempy_labs/__init__.py +70 -51
  4. sempy_labs/_ai.py +0 -2
  5. sempy_labs/_capacity_migration.py +1 -2
  6. sempy_labs/_data_pipelines.py +118 -0
  7. sempy_labs/_documentation.py +144 -0
  8. sempy_labs/_eventhouses.py +118 -0
  9. sempy_labs/_eventstreams.py +118 -0
  10. sempy_labs/_generate_semantic_model.py +3 -3
  11. sempy_labs/_git.py +3 -3
  12. sempy_labs/_helper_functions.py +117 -26
  13. sempy_labs/_icons.py +21 -0
  14. sempy_labs/_kql_databases.py +134 -0
  15. sempy_labs/_kql_querysets.py +124 -0
  16. sempy_labs/_list_functions.py +12 -425
  17. sempy_labs/_mirrored_warehouses.py +50 -0
  18. sempy_labs/_ml_experiments.py +122 -0
  19. sempy_labs/_ml_models.py +120 -0
  20. sempy_labs/_model_auto_build.py +0 -4
  21. sempy_labs/_model_bpa.py +11 -11
  22. sempy_labs/_model_bpa_bulk.py +8 -7
  23. sempy_labs/_model_dependencies.py +26 -18
  24. sempy_labs/_notebooks.py +5 -16
  25. sempy_labs/_query_scale_out.py +2 -2
  26. sempy_labs/_refresh_semantic_model.py +7 -19
  27. sempy_labs/_spark.py +10 -10
  28. sempy_labs/_vertipaq.py +16 -18
  29. sempy_labs/_warehouses.py +132 -0
  30. sempy_labs/_workspaces.py +0 -3
  31. sempy_labs/admin/_basic_functions.py +92 -10
  32. sempy_labs/admin/_domains.py +1 -1
  33. sempy_labs/directlake/_directlake_schema_sync.py +1 -1
  34. sempy_labs/directlake/_dl_helper.py +32 -16
  35. sempy_labs/directlake/_guardrails.py +7 -7
  36. sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
  37. sempy_labs/directlake/_warm_cache.py +1 -1
  38. sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -3
  39. sempy_labs/lakehouse/_lakehouse.py +3 -2
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
  41. sempy_labs/report/_generate_report.py +1 -1
  42. sempy_labs/report/_report_bpa.py +13 -3
  43. sempy_labs/report/_reportwrapper.py +14 -16
  44. sempy_labs/tom/_model.py +261 -24
  45. {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/LICENSE +0 -0
  46. {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/WHEEL +0 -0
  47. {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/top_level.txt +0 -0
sempy_labs/_documentation.py ADDED
@@ -0,0 +1,144 @@
+import sempy
+import sempy.fabric as fabric
+import pandas as pd
+from typing import List, Optional
+
+
+def list_all_items(workspaces: Optional[str | List[str]] = None):
+
+    df = pd.DataFrame(
+        columns=[
+            "Workspace Name",
+            "Workspace Id",
+            "Item Name",
+            "Item Type",
+            "Description",
+        ]
+    )
+
+    if isinstance(workspaces, str):
+        workspaces = [workspaces]
+
+    dfW = fabric.list_workspaces()
+    if workspaces is not None:
+        dfW = dfW[dfW["Name"].isin(workspaces)]
+
+    for _, r in dfW.iterrows():
+        workspace_name = r["Name"]
+        workspace_id = r["Id"]
+        dfI = fabric.list_items(workspace=workspace_name)
+        for _, r2 in dfI.iterrows():
+
+            new_data = {
+                "Workspace Name": workspace_name,
+                "Workspace Id": workspace_id,
+                "Item Name": r2["Name"],
+                "Item Type": r2["Type"],
+                "Description": r2["Description"],
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def data_dictionary(dataset: str, workspace: Optional[str | None] = None):
+
+    from sempy_labs.tom import connect_semantic_model
+
+    sempy.fabric._client._utils._init_analysis_services()
+    import Microsoft.AnalysisServices.Tabular as TOM
+
+    df = pd.DataFrame(
+        columns=[
+            "Workspace Name",
+            "Model Name",
+            "Table Name",
+            "Object Type",
+            "Object Name",
+            "Hidden Flag",
+            "Description",
+            "Display Folder",
+            "Measure Formula",
+        ]
+    )
+
+    with connect_semantic_model(
+        dataset=dataset, readonly=True, workspace=workspace
+    ) as tom:
+        for t in tom.model.Tables:
+            expr = None
+            if tom.is_calculated_table(table_name=t.Name):
+                pName = next(p.Name for p in t.Partitions)
+                expr = t.Partitions[pName].Source.Expression
+
+            new_data = {
+                "Workspace Name": workspace,
+                "Model Name": dataset,
+                "Table Name": t.Name,
+                "Object Type": t.ObjectType,
+                "Object Name": t.Name,
+                "Hidden Flag": t.IsHidden,
+                "Description": t.Description,
+                "Display Folder": None,
+                "Measure Formula": expr,
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+            cols = [c for c in t.Columns if c.Type != TOM.ColumnType.RowNumber]
+            for c in cols:
+
+                def get_calc_column_expression(table_name, column_name):
+                    expr = None
+                    if tom.is_calculated_column(
+                        table_name=table_name, column_name=column_name
+                    ):
+                        expr = c.Expression
+                    return expr
+
+                new_data = {
+                    "Workspace Name": workspace,
+                    "Model Name": dataset,
+                    "Table Name": t.Name,
+                    "Object Type": c.ObjectType,
+                    "Object Name": c.Name,
+                    "Hidden Flag": c.IsHidden,
+                    "Description": c.Description,
+                    "Display Folder": c.DisplayFolder,
+                    "Measure Formula": get_calc_column_expression(t.Name, c.Name),
+                }
+                df = pd.concat(
+                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                )
+            for m in t.Measures:
+                new_data = {
+                    "Workspace Name": workspace,
+                    "Model Name": dataset,
+                    "Table Name": t.Name,
+                    "Object Type": m.ObjectType,
+                    "Object Name": m.Name,
+                    "Hidden Flag": m.IsHidden,
+                    "Description": m.Description,
+                    "Display Folder": m.DisplayFolder,
+                    "Measure Formula": m.Expression,
+                }
+                df = pd.concat(
+                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                )
+
+            if t.CalculationGroup is not None:
+                for ci in t.CalculationGroup.CalculationItems:
+                    new_data = {
+                        "Workspace Name": workspace,
+                        "Model Name": dataset,
+                        "Table Name": t.Name,
+                        "Object Type": "Calculation Item",
+                        "Object Name": ci.Name,
+                        "Hidden Flag": t.IsHidden,
+                        "Description": ci.Description,
+                        "Display Folder": None,
+                        "Measure Formula": ci.Expression,
+                    }
+                    df = pd.concat(
+                        [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
+                    )
+
+    return df
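For orientation, a minimal usage sketch of the two functions added above. The workspace and dataset names are hypothetical, and the imports target the new private module directly rather than assuming what the reworked `sempy_labs/__init__.py` re-exports:

```python
# Hypothetical names; assumes a Fabric notebook session where sempy is available.
from sempy_labs._documentation import list_all_items, data_dictionary

# Inventory every item across selected workspaces (all workspaces when None).
items = list_all_items(workspaces=["Sales", "Finance"])

# One row per table, column, measure, and calculation item in a model.
dictionary = data_dictionary(dataset="AdventureWorks", workspace="Sales")
print(dictionary[["Table Name", "Object Type", "Object Name"]].head())
```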
sempy_labs/_eventhouses.py ADDED
@@ -0,0 +1,118 @@
+import sempy.fabric as fabric
+import pandas as pd
+import sempy_labs._icons as icons
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    lro,
+    pagination,
+)
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def create_eventhouse(
+    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+):
+    """
+    Creates a Fabric eventhouse.
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventhouse.
+    description : str, default=None
+        A description of the environment.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    request_body = {"displayName": name}
+
+    if description:
+        request_body["description"] = description
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/eventhouses", json=request_body
+    )
+
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{name}' eventhouse has been created within the '{workspace}' workspace."
+    )
+
+
+def list_eventhouses(workspace: Optional[str] = None) -> pd.DataFrame:
+    """
+    Shows the eventhouses within a workspace.
+
+    Parameters
+    ----------
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the eventhouses within a workspace.
+    """
+
+    df = pd.DataFrame(columns=["Eventhouse Name", "Eventhouse Id", "Description"])
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1/workspaces/{workspace_id}/eventhouses")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Eventhouse Name": v.get("displayName"),
+                "Eventhouse Id": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def delete_eventhouse(name: str, workspace: Optional[str] = None):
+    """
+    Deletes a Fabric eventhouse.
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventhouse.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    item_id = fabric.resolve_item_id(
+        item_name=name, type="Eventhouse", workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.delete(f"/v1/workspaces/{workspace_id}/eventhouses/{item_id}")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} The '{name}' eventhouse within the '{workspace}' workspace has been deleted."
+    )
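A quick sketch of the lifecycle the three functions above provide; the workspace and item names are made up for illustration:

```python
from sempy_labs._eventhouses import (
    create_eventhouse,
    list_eventhouses,
    delete_eventhouse,
)

# Create, inspect, then remove an eventhouse in a named workspace.
create_eventhouse(name="TelemetryEH", description="Device telemetry", workspace="Ops")
print(list_eventhouses(workspace="Ops"))  # Eventhouse Name / Eventhouse Id / Description
delete_eventhouse(name="TelemetryEH", workspace="Ops")
```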
sempy_labs/_eventstreams.py ADDED
@@ -0,0 +1,118 @@
+import sempy.fabric as fabric
+import pandas as pd
+import sempy_labs._icons as icons
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    lro,
+    pagination,
+)
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
+    """
+    Shows the eventstreams within a workspace.
+
+    Parameters
+    ----------
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the eventstreams within a workspace.
+    """
+
+    df = pd.DataFrame(columns=["Eventstream Name", "Eventstream Id", "Description"])
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Eventstream Name": v.get("displayName"),
+                "Eventstream Id": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def create_eventstream(
+    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+):
+    """
+    Creates a Fabric eventstream.
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventstream.
+    description : str, default=None
+        A description of the environment.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    request_body = {"displayName": name}
+
+    if description:
+        request_body["description"] = description
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/eventstreams", json=request_body
+    )
+
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{name}' eventstream has been created within the '{workspace}' workspace."
+    )
+
+
+def delete_eventstream(name: str, workspace: Optional[str] = None):
+    """
+    Deletes a Fabric eventstream.
+
+    Parameters
+    ----------
+    name: str
+        Name of the eventstream.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    item_id = fabric.resolve_item_id(
+        item_name=name, type="Eventstream", workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.delete(f"/v1/workspaces/{workspace_id}/eventstreams/{item_id}")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} The '{name}' eventstream within the '{workspace}' workspace has been deleted."
+    )
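The eventstream module is a near clone of the eventhouse module, and the same create/list/delete shape recurs in the other item modules new in this release (`_data_pipelines.py`, `_kql_databases.py`, `_kql_querysets.py`, `_ml_experiments.py`, `_ml_models.py`, `_warehouses.py`). A condensed sketch of the shared create pattern; `_create_item` is a hypothetical name for illustration, not a helper that exists in the package:

```python
import sempy.fabric as fabric
from typing import Optional
from sempy_labs._helper_functions import resolve_workspace_name_and_id, lro


def _create_item(endpoint: str, name: str, description: Optional[str] = None,
                 workspace: Optional[str] = None):
    # Resolve the workspace, POST to /v1/workspaces/{id}/{endpoint}, then let
    # lro() poll if Fabric answers 202 (long-running operation) rather than 201.
    workspace, workspace_id = resolve_workspace_name_and_id(workspace)
    body = {"displayName": name}
    if description:
        body["description"] = description
    client = fabric.FabricRestClient()
    response = client.post(f"/v1/workspaces/{workspace_id}/{endpoint}", json=body)
    lro(client, response, status_codes=[201, 202])


# e.g. _create_item("eventstreams", "ClickstreamES", workspace="Ops")
```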
sempy_labs/_generate_semantic_model.py CHANGED
@@ -20,7 +20,7 @@ def create_blank_semantic_model(
     dataset: str,
     compatibility_level: int = 1605,
     workspace: Optional[str] = None,
-    overwrite: Optional[bool] = True,
+    overwrite: bool = True,
 ):
     """
     Creates a new blank semantic model (no tables/columns etc.).
@@ -212,8 +212,8 @@ def deploy_semantic_model(
     source_workspace: Optional[str] = None,
     target_dataset: Optional[str] = None,
     target_workspace: Optional[str] = None,
-    refresh_target_dataset: Optional[bool] = True,
-    overwrite: Optional[bool] = False,
+    refresh_target_dataset: bool = True,
+    overwrite: bool = False,
 ):
     """
     Deploys a semantic model based on an existing semantic model.
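One pattern worth noting in this and several later hunks: boolean flags whose defaults are real booleans are retyped from `Optional[bool]` to plain `bool`. `Optional[X]` means "X or None", not "has a default value", so the old annotations wrongly advertised `None` as an accepted argument. A minimal before/after:

```python
from typing import Optional


# 0.8.0 style: implies callers may pass None, which the code never handles.
def deploy_old(overwrite: Optional[bool] = False) -> None: ...


# 0.8.2 style: the flag is always a real boolean.
def deploy_new(overwrite: bool = False) -> None: ...
```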
sempy_labs/_git.py CHANGED
@@ -314,7 +314,7 @@ def update_from_git(
     remote_commit_hash: str,
     conflict_resolution_policy: str,
     workspace_head: Optional[str] = None,
-    allow_override: Optional[bool] = False,
+    allow_override: bool = False,
     workspace: Optional[str] = None,
 ):
     """
@@ -341,9 +341,9 @@
     workspace, workspace_id = resolve_workspace_name_and_id(workspace)

     conflict_resolution_policies = ["PreferWorkspace", "PreferRemote"]
-    if "remote" in conflict_resolution_policies.lower():
+    if "remote" in [policy.lower() for policy in conflict_resolution_policies]:
         conflict_resolution_policies = "PreferRemote"
-    elif "workspace" in conflict_resolution_policies.lower():
+    elif "workspace" in [policy.lower() for policy in conflict_resolution_policies]:
         conflict_resolution_policies = "PreferWorkspace"

     if conflict_resolution_policy not in conflict_resolution_policies:
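The second hunk repairs a type error: 0.8.0 called `.lower()` on the policy list itself, which raises `AttributeError`, while 0.8.2 lowercases each entry before the membership test. A standalone illustration of the corrected idiom:

```python
policies = ["PreferWorkspace", "PreferRemote"]

# 0.8.0: policies.lower() -> AttributeError: 'list' object has no attribute 'lower'
# 0.8.2: lowercase element-wise, then test membership.
lowered = [policy.lower() for policy in policies]
print("preferremote" in lowered)  # True
```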
sempy_labs/_helper_functions.py CHANGED
@@ -3,6 +3,7 @@ import re
 import json
 import base64
 import time
+import uuid
 from sempy.fabric.exceptions import FabricHTTPException
 import pandas as pd
 from functools import wraps
@@ -16,7 +17,7 @@ from azure.core.credentials import TokenCredential, AccessToken

 def create_abfss_path(
     lakehouse_id: UUID, lakehouse_workspace_id: UUID, delta_table_name: str
-):
+) -> str:
     """
     Creates an abfss path for a delta table in a Fabric lakehouse.

@@ -38,7 +39,7 @@ def create_abfss_path(
     return f"abfss://{lakehouse_workspace_id}@onelake.dfs.fabric.microsoft.com/{lakehouse_id}/Tables/{delta_table_name}"


-def format_dax_object_name(table: str, column: str):
+def format_dax_object_name(table: str, column: str) -> str:
     """
     Formats a table/column combination to the 'Table Name'[Column Name] format.

@@ -60,7 +61,7 @@ def format_dax_object_name(table: str, column: str):

 def create_relationship_name(
     from_table: str, from_column: str, to_table: str, to_column: str
-):
+) -> str:
     """
     Formats a relationship's table/columns into a fully qualified name.

@@ -88,7 +89,7 @@ def create_relationship_name(
     )


-def resolve_report_id(report: str, workspace: Optional[str] = None):
+def resolve_report_id(report: str, workspace: Optional[str] = None) -> UUID:
     """
     Obtains the ID of the Power BI report.

@@ -116,7 +117,7 @@ def resolve_report_id(report: str, workspace: Optional[str] = None):
     return obj


-def resolve_report_name(report_id: UUID, workspace: Optional[str] = None):
+def resolve_report_name(report_id: UUID, workspace: Optional[str] = None) -> str:
     """
     Obtains the name of the Power BI report.

@@ -146,7 +147,7 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str] = None):
     return obj


-def resolve_dataset_id(dataset: str, workspace: Optional[str] = None):
+def resolve_dataset_id(dataset: str, workspace: Optional[str] = None) -> UUID:
     """
     Obtains the ID of the semantic model.

@@ -176,7 +177,7 @@ def resolve_dataset_id(dataset: str, workspace: Optional[str] = None):
     return obj


-def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None):
+def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None) -> str:
     """
     Obtains the name of the semantic model.

@@ -208,7 +209,7 @@ def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None):

 def resolve_lakehouse_name(
     lakehouse_id: Optional[UUID] = None, workspace: Optional[str] = None
-):
+) -> str:
     """
     Obtains the name of the Fabric lakehouse.

@@ -242,7 +243,7 @@ def resolve_lakehouse_name(
     return obj


-def resolve_lakehouse_id(lakehouse: str, workspace: Optional[str] = None):
+def resolve_lakehouse_id(lakehouse: str, workspace: Optional[str] = None) -> UUID:
     """
     Obtains the ID of the Fabric lakehouse.

@@ -321,7 +322,7 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
     return sqlEndpointId


-def generate_embedded_filter(filter: str):
+def generate_embedded_filter(filter: str) -> str:
     """
     Converts the filter expression to a filter expression which can be used by a Power BI embedded URL.

@@ -390,7 +391,7 @@ def save_as_delta_table(
     dataframe,
     delta_table_name: str,
     write_mode: str,
-    merge_schema: Optional[bool] = False,
+    merge_schema: bool = False,
     schema: Optional[dict] = None,
     lakehouse: Optional[str] = None,
     workspace: Optional[str] = None,
@@ -869,7 +870,7 @@ def lro(
     response,
     status_codes: Optional[List[str]] = [200, 202],
     sleep_time: Optional[int] = 1,
-    return_status_code: Optional[bool] = False,
+    return_status_code: bool = False,
 ):

     if response.status_code not in status_codes:
@@ -922,6 +923,19 @@ def pagination(client, response):


 def resolve_deployment_pipeline_id(deployment_pipeline: str) -> UUID:
+    """
+    Obtains the Id for a given deployment pipeline.
+
+    Parameters
+    ----------
+    deployment_pipeline : str
+        The deployment pipeline name
+
+    Returns
+    -------
+    UUID
+        The deployment pipeline Id.
+    """

     from sempy_labs._deployment_pipelines import list_deployment_pipelines

@@ -943,7 +957,7 @@ class FabricTokenCredential(TokenCredential):
         scopes: str,
         claims: Optional[str] = None,
         tenant_id: Optional[str] = None,
-        enable_cae: Optional[bool] = False,
+        enable_cae: bool = False,
         **kwargs: any,
     ) -> AccessToken:

@@ -968,15 +982,26 @@ def get_adls_client(account_name):
     return service_client


-def resolve_warehouse_id(warehouse: str, workspace: Optional[str]):
+def resolve_warehouse_id(warehouse: str, workspace: Optional[str]) -> UUID:
+    """
+    Obtains the Id for a given warehouse.
+
+    Parameters
+    ----------
+    warehouse : str
+        The warehouse name
+
+    Returns
+    -------
+    UUID
+        The warehouse Id.
+    """

     workspace = fabric.resolve_workspace_name(workspace)
-    warehouse_id = fabric.resolve_item_id(
+    return fabric.resolve_item_id(
         item_name=warehouse, type="Warehouse", workspace=workspace
     )

-    return warehouse_id
-

 def get_language_codes(languages: str | List[str]):

@@ -1024,6 +1049,7 @@ def get_azure_token_credentials(

 def convert_to_alphanumeric_lowercase(input_string):

+    # Removes non-alphanumeric characters
     cleaned_string = re.sub(r"[^a-zA-Z0-9]", "", input_string)
     cleaned_string = cleaned_string.lower()

@@ -1038,16 +1064,81 @@ def resolve_environment_id(environment: str, workspace: Optional[str] = None) ->
     ----------
     environment: str
         Name of the environment.
+
+    Returns
+    -------
+    UUID
+        The environment Id.
     """
-    from sempy_labs._environments import list_environments

-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace = fabric.resolve_workspace_name(workspace)
+    return fabric.resolve_item_id(
+        item_name=environment, type="Environment", workspace=workspace
+    )

-    dfE = list_environments(workspace=workspace)
-    dfE_filt = dfE[dfE["Environment Name"] == environment]
-    if len(dfE_filt) == 0:
-        raise ValueError(
-            f"{icons.red_dot} The '{environment}' environment does not exist within the '{workspace}' workspace."
-        )

-    return dfE_filt["Environment Id"].iloc[0]
+def make_clickable(val):
+
+    return f'<a target="_blank" href="{val}">{val}</a>'
+
+
+def convert_to_friendly_case(text: str) -> str:
+    """
+    Converts a string of pascal/camel/snake case to business-friendly case.
+
+    Parameters
+    ----------
+    text : str
+        The text to convert.
+
+    Returns
+    -------
+    str
+        Text converted into a business-friendly text.
+    """
+    if text is not None:
+        text = text.replace("_", " ")
+        # Insert space before each capital letter, avoiding double spaces
+        text = re.sub(r"(?<!\s)(?=[A-Z])", " ", text)
+        # Strip leading/trailing whitespace and capitalize the first letter of each word
+        text = text.strip().title()
+
+    return text
+
+
+def resolve_notebook_id(notebook: str, workspace: Optional[str] = None) -> UUID:
+    """
+    Obtains the notebook Id for a given notebook.
+
+    Parameters
+    ----------
+    notebook: str
+        Name of the notebook.
+
+    Returns
+    -------
+    UUID
+        The notebook Id.
+    """
+
+    workspace = fabric.resolve_workspace_name(workspace)
+    return fabric.resolve_item_id(
+        item_name=notebook, type="Notebook", workspace=workspace
+    )
+
+
+def generate_guid():
+
+    return str(uuid.uuid4())
+
+
+def get_max_run_id(lakehouse: str, table_name: str) -> int:
+
+    from pyspark.sql import SparkSession
+
+    spark = SparkSession.builder.getOrCreate()
+    query = f"SELECT MAX(RunId) FROM {lakehouse}.{table_name}"
+    dfSpark = spark.sql(query)
+    max_run_id = dfSpark.collect()[0][0]
+
+    return max_run_id
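Finally, a small sketch exercising the general-purpose helpers added at the bottom of `_helper_functions.py`. These three are pure Python and run anywhere; `get_max_run_id` additionally needs a Spark session and an existing lakehouse table, so it is omitted here:

```python
from sempy_labs._helper_functions import (
    convert_to_friendly_case,
    generate_guid,
    make_clickable,
)

print(convert_to_friendly_case("myTableName_v2"))
# "My Table Name V2"

print(generate_guid())
# e.g. "3f2504e0-4f89-41d3-9a0c-0305e82c3301"

print(make_clickable("https://fabric.microsoft.com"))
# <a target="_blank" href="https://fabric.microsoft.com">https://fabric.microsoft.com</a>
```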