semantic-link-labs 0.7.4__py3-none-any.whl → 0.8.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.

Files changed (59)
  1. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/METADATA +43 -7
  2. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/RECORD +59 -40
  3. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +116 -58
  5. sempy_labs/_ai.py +0 -2
  6. sempy_labs/_capacities.py +39 -3
  7. sempy_labs/_capacity_migration.py +623 -0
  8. sempy_labs/_clear_cache.py +8 -8
  9. sempy_labs/_connections.py +15 -13
  10. sempy_labs/_data_pipelines.py +118 -0
  11. sempy_labs/_documentation.py +144 -0
  12. sempy_labs/_eventhouses.py +118 -0
  13. sempy_labs/_eventstreams.py +118 -0
  14. sempy_labs/_generate_semantic_model.py +3 -3
  15. sempy_labs/_git.py +23 -24
  16. sempy_labs/_helper_functions.py +140 -47
  17. sempy_labs/_icons.py +40 -0
  18. sempy_labs/_kql_databases.py +134 -0
  19. sempy_labs/_kql_querysets.py +124 -0
  20. sempy_labs/_list_functions.py +218 -421
  21. sempy_labs/_mirrored_warehouses.py +50 -0
  22. sempy_labs/_ml_experiments.py +122 -0
  23. sempy_labs/_ml_models.py +120 -0
  24. sempy_labs/_model_auto_build.py +0 -4
  25. sempy_labs/_model_bpa.py +10 -12
  26. sempy_labs/_model_bpa_bulk.py +8 -7
  27. sempy_labs/_model_dependencies.py +26 -18
  28. sempy_labs/_notebooks.py +5 -16
  29. sempy_labs/_query_scale_out.py +6 -5
  30. sempy_labs/_refresh_semantic_model.py +7 -19
  31. sempy_labs/_spark.py +40 -45
  32. sempy_labs/_sql.py +60 -15
  33. sempy_labs/_vertipaq.py +25 -25
  34. sempy_labs/_warehouses.py +132 -0
  35. sempy_labs/_workspaces.py +0 -3
  36. sempy_labs/admin/__init__.py +53 -0
  37. sempy_labs/admin/_basic_functions.py +888 -0
  38. sempy_labs/admin/_domains.py +411 -0
  39. sempy_labs/directlake/_directlake_schema_sync.py +1 -1
  40. sempy_labs/directlake/_dl_helper.py +32 -16
  41. sempy_labs/directlake/_generate_shared_expression.py +11 -14
  42. sempy_labs/directlake/_guardrails.py +7 -7
  43. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +14 -24
  44. sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
  45. sempy_labs/directlake/_warm_cache.py +1 -1
  46. sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -3
  47. sempy_labs/lakehouse/_lakehouse.py +3 -2
  48. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
  49. sempy_labs/report/__init__.py +9 -6
  50. sempy_labs/report/_generate_report.py +1 -1
  51. sempy_labs/report/_report_bpa.py +369 -0
  52. sempy_labs/report/_report_bpa_rules.py +113 -0
  53. sempy_labs/report/_report_helper.py +254 -0
  54. sempy_labs/report/_report_list_functions.py +95 -0
  55. sempy_labs/report/_report_rebind.py +0 -4
  56. sempy_labs/report/_reportwrapper.py +2037 -0
  57. sempy_labs/tom/_model.py +333 -22
  58. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/LICENSE +0 -0
  59. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/top_level.txt +0 -0
sempy_labs/_mirrored_warehouses.py ADDED
@@ -0,0 +1,50 @@
+ import sempy.fabric as fabric
+ import pandas as pd
+ from typing import Optional
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     pagination,
+ )
+ from sempy.fabric.exceptions import FabricHTTPException
+
+
+ def list_mirrored_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
+     """
+     Shows the mirrored warehouses within a workspace.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the mirrored warehouses within a workspace.
+     """
+
+     df = pd.DataFrame(
+         columns=["Mirrored Warehouse Name", "Mirrored Warehouse Id", "Description"]
+     )
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     client = fabric.FabricRestClient()
+     response = client.get(f"/v1/workspaces/{workspace_id}/mirroredWarehouses")
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+     responses = pagination(client, response)
+
+     for r in responses:
+         for v in r.get("value", []):
+
+             new_data = {
+                 "Mirrored Warehouse Name": v.get("displayName"),
+                 "Mirrored Warehouse Id": v.get("id"),
+                 "Description": v.get("description"),
+             }
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     return df
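
The new module adds a single read-only helper that pages through the Fabric REST API. A minimal usage sketch, assuming the function is re-exported from the top-level sempy_labs package (the expanded __init__.py above suggests it is); the workspace name is a placeholder:

    import sempy_labs as labs

    # List mirrored warehouses in the workspace resolved from the current context.
    df = labs.list_mirrored_warehouses()

    # Or target a workspace explicitly ("Sales" is a placeholder name).
    df = labs.list_mirrored_warehouses(workspace="Sales")
    print(df[["Mirrored Warehouse Name", "Mirrored Warehouse Id"]])
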
sempy_labs/_ml_experiments.py ADDED
@@ -0,0 +1,122 @@
+ import sempy.fabric as fabric
+ import pandas as pd
+ import sempy_labs._icons as icons
+ from typing import Optional
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     lro,
+     pagination,
+ )
+ from sempy.fabric.exceptions import FabricHTTPException
+
+
+ def list_ml_experiments(workspace: Optional[str] = None) -> pd.DataFrame:
+     """
+     Shows the ML experiments within a workspace.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the ML models within a workspace.
+     """
+
+     df = pd.DataFrame(columns=["ML Experiment Name", "ML Experiment Id", "Description"])
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     client = fabric.FabricRestClient()
+     response = client.get(f"/v1/workspaces/{workspace_id}/mlExperiments")
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     responses = pagination(client, response)
+
+     for r in responses:
+         for v in r.get("value", []):
+             model_id = v.get("id")
+             modelName = v.get("displayName")
+             desc = v.get("description")
+
+             new_data = {
+                 "ML Experiment Name": modelName,
+                 "ML Experiment Id": model_id,
+                 "Description": desc,
+             }
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     return df
+
+
+ def create_ml_experiment(
+     name: str, description: Optional[str] = None, workspace: Optional[str] = None
+ ):
+     """
+     Creates a Fabric ML experiment.
+
+     Parameters
+     ----------
+     name: str
+         Name of the ML experiment.
+     description : str, default=None
+         A description of the environment.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     request_body = {"displayName": name}
+
+     if description:
+         request_body["description"] = description
+
+     client = fabric.FabricRestClient()
+     response = client.post(
+         f"/v1/workspaces/{workspace_id}/mlExperiments", json=request_body
+     )
+
+     lro(client, response, status_codes=[201, 202])
+
+     print(
+         f"{icons.green_dot} The '{name}' ML experiment has been created within the '{workspace}' workspace."
+     )
+
+
+ def delete_ml_experiment(name: str, workspace: Optional[str] = None):
+     """
+     Deletes a Fabric ML experiment.
+
+     Parameters
+     ----------
+     name: str
+         Name of the ML experiment.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     item_id = fabric.resolve_item_id(
+         item_name=name, type="MLExperiment", workspace=workspace
+     )
+
+     client = fabric.FabricRestClient()
+     response = client.delete(f"/v1/workspaces/{workspace_id}/mlExperiments/{item_id}")
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     print(
+         f"{icons.green_dot} The '{name}' ML experiment within the '{workspace}' workspace has been deleted."
+     )
sempy_labs/_ml_models.py ADDED
@@ -0,0 +1,120 @@
+ import sempy.fabric as fabric
+ import pandas as pd
+ import sempy_labs._icons as icons
+ from typing import Optional
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     lro,
+     pagination,
+ )
+ from sempy.fabric.exceptions import FabricHTTPException
+
+
+ def list_ml_models(workspace: Optional[str] = None) -> pd.DataFrame:
+     """
+     Shows the ML models within a workspace.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the ML models within a workspace.
+     """
+
+     df = pd.DataFrame(columns=["ML Model Name", "ML Model Id", "Description"])
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     client = fabric.FabricRestClient()
+     response = client.get(f"/v1/workspaces/{workspace_id}/mlModels")
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     responses = pagination(client, response)
+
+     for r in responses:
+         for v in r.get("value", []):
+             model_id = v.get("id")
+             modelName = v.get("displayName")
+             desc = v.get("description")
+
+             new_data = {
+                 "ML Model Name": modelName,
+                 "ML Model Id": model_id,
+                 "Description": desc,
+             }
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     return df
+
+
+ def create_ml_model(
+     name: str, description: Optional[str] = None, workspace: Optional[str] = None
+ ):
+     """
+     Creates a Fabric ML model.
+
+     Parameters
+     ----------
+     name: str
+         Name of the ML model.
+     description : str, default=None
+         A description of the environment.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     request_body = {"displayName": name}
+
+     if description:
+         request_body["description"] = description
+
+     client = fabric.FabricRestClient()
+     response = client.post(f"/v1/workspaces/{workspace_id}/mlModels", json=request_body)
+
+     lro(client, response, status_codes=[201, 202])
+
+     print(
+         f"{icons.green_dot} The '{name}' ML model has been created within the '{workspace}' workspace."
+     )
+
+
+ def delete_ml_model(name: str, workspace: Optional[str] = None):
+     """
+     Deletes a Fabric ML model.
+
+     Parameters
+     ----------
+     name: str
+         Name of the ML model.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     item_id = fabric.resolve_item_id(
+         item_name=name, type="MLModel", workspace=workspace
+     )
+
+     client = fabric.FabricRestClient()
+     response = client.delete(f"/v1/workspaces/{workspace_id}/mlModels/{item_id}")
+
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     print(
+         f"{icons.green_dot} The '{name}' ML model within the '{workspace}' workspace has been deleted."
+     )
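
Both new item modules follow the same list/create/delete pattern over the Fabric REST items endpoints, with lro() handling the 201/202 long-running-operation responses on create. A hedged lifecycle sketch, assuming package-level re-exports; all item names are placeholders:

    import sempy_labs as labs

    # Create, list, and delete an ML experiment.
    labs.create_ml_experiment(name="churn-experiment", description="Churn modeling runs")
    print(labs.list_ml_experiments())
    labs.delete_ml_experiment(name="churn-experiment")

    # The ML model functions mirror the same API shape.
    labs.create_ml_model(name="churn-model")
    print(labs.list_ml_models())
    labs.delete_ml_model(name="churn-model")
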
sempy_labs/_model_auto_build.py CHANGED
@@ -34,10 +34,6 @@ def model_auto_build(
          The Fabric workspace used by the lakehouse.
          Defaults to None which resolves to the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the workspace of the notebook.
-
-     Returns
-     -------
-
      """

      workspace = fabric.resolve_workspace_name(workspace)
sempy_labs/_model_bpa.py CHANGED
@@ -3,7 +3,6 @@ import pandas as pd
  import warnings
  import datetime
  from IPython.display import display, HTML
- from pyspark.sql import SparkSession
  from sempy_labs._model_dependencies import get_model_calc_dependencies
  from sempy_labs._helper_functions import (
      format_dax_object_name,
@@ -13,6 +12,7 @@ from sempy_labs._helper_functions import (
      resolve_workspace_capacity,
      resolve_dataset_id,
      get_language_codes,
+     get_max_run_id,
  )
  from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
  from sempy_labs.tom import connect_semantic_model
@@ -30,9 +30,9 @@ def run_model_bpa(
      dataset: str,
      rules: Optional[pd.DataFrame] = None,
      workspace: Optional[str] = None,
-     export: Optional[bool] = False,
-     return_dataframe: Optional[bool] = False,
-     extended: Optional[bool] = False,
+     export: bool = False,
+     return_dataframe: bool = False,
+     extended: bool = False,
      language: Optional[str] = None,
      **kwargs,
  ):
@@ -151,6 +151,7 @@ def run_model_bpa(
      def translate_using_spark(rule_file):

          from synapse.ml.services import Translate
+         from pyspark.sql import SparkSession

          rules_temp = rule_file.copy()
          rules_temp = rules_temp.drop(["Expression", "URL", "Severity"], axis=1)
@@ -346,15 +347,11 @@

      dfExport["Severity"].replace(icons.severity_mapping, inplace=True)

-     spark = SparkSession.builder.getOrCreate()
-     query = f"SELECT MAX(RunId) FROM {lakehouse}.{delta_table_name}"
-
      if len(lakeT_filt) == 0:
          runId = 1
      else:
-         dfSpark = spark.sql(query)
-         maxRunId = dfSpark.collect()[0][0]
-         runId = maxRunId + 1
+         max_run_id = get_max_run_id(table_name=delta_table_name)
+         runId = max_run_id + 1

      now = datetime.datetime.now()
      dfD = fabric.list_datasets(workspace=workspace, mode="rest")
@@ -506,7 +503,7 @@
          content_html += f'<td>{row["Object Type"]}</td>'
          content_html += f'<td>{row["Object Name"]}</td>'
          content_html += f'<td style="text-align: center;">{row["Severity"]}</td>'
-         #content_html += f'<td>{row["Severity"]}</td>'
+         # content_html += f'<td>{row["Severity"]}</td>'
          content_html += "</tr>"
      content_html += "</table>"

@@ -514,4 +511,5 @@
      tab_html += "</div>"

      # Display the tabs, tab contents, and run the script
-     return display(HTML(styles + tab_html + content_html + script))
+     if not export:
+         return display(HTML(styles + tab_html + content_html + script))
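
The run_model_bpa changes tighten the boolean keyword defaults and suppress the final HTML display when exporting. A usage sketch; "AdventureWorks" is a placeholder dataset name:

    from sempy_labs import run_model_bpa

    # Render the interactive BPA report in the notebook (default behavior).
    run_model_bpa(dataset="AdventureWorks")

    # Export the results to the attached lakehouse; as of 0.8.1 this no longer
    # also renders the HTML report.
    run_model_bpa(dataset="AdventureWorks", export=True)

    # Or collect the rule violations as a dataframe for further processing.
    df = run_model_bpa(dataset="AdventureWorks", return_dataframe=True)
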
sempy_labs/_model_bpa_bulk.py CHANGED
@@ -1,14 +1,17 @@
  import sempy.fabric as fabric
  import pandas as pd
  import datetime
- from pyspark.sql import SparkSession
  from sempy_labs._helper_functions import (
      resolve_lakehouse_name,
      save_as_delta_table,
      resolve_workspace_capacity,
      retry,
+     get_max_run_id,
+ )
+ from sempy_labs.lakehouse import (
+     get_lakehouse_tables,
+     lakehouse_attached,
  )
- from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
  from sempy_labs._model_bpa import run_model_bpa
  from typing import Optional, List
  from sempy._utils._log import log
@@ -18,7 +21,7 @@ import sempy_labs._icons as icons
  @log
  def run_model_bpa_bulk(
      rules: Optional[pd.DataFrame] = None,
-     extended: Optional[bool] = False,
+     extended: bool = False,
      language: Optional[str] = None,
      workspace: Optional[str | List[str]] = None,
      skip_models: Optional[str | List[str]] = ["ModelBPA", "Fabric Capacity Metrics"],
@@ -78,7 +81,6 @@
      ]
      now = datetime.datetime.now()
      output_table = "modelbparesults"
-     spark = SparkSession.builder.getOrCreate()
      lakehouse_workspace = fabric.resolve_workspace_name()
      lakehouse_id = fabric.get_lakehouse_id()
      lakehouse = resolve_lakehouse_name(
@@ -90,9 +92,8 @@
      if len(lakeT_filt) == 0:
          runId = 1
      else:
-         dfSpark = spark.table(f"`{lakehouse_id}`.{output_table}").select(F.max("RunId"))
-         maxRunId = dfSpark.collect()[0][0]
-         runId = maxRunId + 1
+         max_run_id = get_max_run_id(table_name=output_table)
+         runId = max_run_id + 1

      if isinstance(workspace, str):
          workspace = [workspace]
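
Both BPA modules replace an inline Spark lookup of MAX(RunId) with a shared get_max_run_id helper imported from sempy_labs._helper_functions. The helper's body is part of the +140-line change to that file and is not shown in this diff; a hypothetical reconstruction, under that assumption, of the logic it centralizes:

    from pyspark.sql import SparkSession
    from pyspark.sql import functions as F

    def get_max_run_id(table_name: str) -> int:
        # Hypothetical sketch only: the real helper lives in
        # sempy_labs/_helper_functions.py, whose body this diff does not show.
        spark = SparkSession.builder.getOrCreate()
        row = spark.table(table_name).select(F.max("RunId")).collect()[0]
        return row[0] if row[0] is not None else 0

Centralizing the lookup removes the duplicated SparkSession boilerplate from both call sites and keeps the RunId logic in one place.
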
sempy_labs/_model_dependencies.py CHANGED
@@ -74,7 +74,7 @@ def get_measure_dependencies(

      for index, dependency in dep_filt.iterrows():
          d = True
-         if dependency[5] == "Measure":
+         if dependency.iloc[5] == "Measure":
              d = False
              df = pd.concat(
                  [
@@ -85,12 +85,14 @@
                          "Table Name": r["Table Name"],
                          "Object Name": r["Object Name"],
                          "Object Type": r["Object Type"],
-                         "Referenced Object": dependency[4],
-                         "Referenced Table": dependency[3],
-                         "Referenced Object Type": dependency[5],
+                         "Referenced Object": dependency.iloc[4],
+                         "Referenced Table": dependency.iloc[3],
+                         "Referenced Object Type": dependency.iloc[
+                             5
+                         ],
                          "Done": d,
                          "Full Object Name": r["Full Object Name"],
-                         "Referenced Full Object Name": dependency[
+                         "Referenced Full Object Name": dependency.iloc[
                              7
                          ],
                          "Parent Node": rObj,
@@ -110,12 +112,14 @@
                          "Table Name": r["Table Name"],
                          "Object Name": r["Object Name"],
                          "Object Type": r["Object Type"],
-                         "Referenced Object": dependency[4],
-                         "Referenced Table": dependency[3],
-                         "Referenced Object Type": dependency[5],
+                         "Referenced Object": dependency.iloc[4],
+                         "Referenced Table": dependency.iloc[3],
+                         "Referenced Object Type": dependency.iloc[
+                             5
+                         ],
                          "Done": d,
                          "Full Object Name": r["Full Object Name"],
-                         "Referenced Full Object Name": dependency[
+                         "Referenced Full Object Name": dependency.iloc[
                              7
                          ],
                          "Parent Node": rObj,
@@ -203,7 +207,7 @@

      for index, dependency in dep_filt.iterrows():
          d = True
-         if dependency[5] in objs:
+         if dependency.iloc[5] in objs:
              d = False
              df = pd.concat(
                  [
@@ -214,12 +218,14 @@
                          "Table Name": r["Table Name"],
                          "Object Name": r["Object Name"],
                          "Object Type": r["Object Type"],
-                         "Referenced Object": dependency[4],
-                         "Referenced Table": dependency[3],
-                         "Referenced Object Type": dependency[5],
+                         "Referenced Object": dependency.iloc[4],
+                         "Referenced Table": dependency.iloc[3],
+                         "Referenced Object Type": dependency.iloc[
+                             5
+                         ],
                          "Done": d,
                          "Full Object Name": r["Full Object Name"],
-                         "Referenced Full Object Name": dependency[
+                         "Referenced Full Object Name": dependency.iloc[
                              7
                          ],
                          "Parent Node": rObj,
@@ -239,12 +245,14 @@
                          "Table Name": r["Table Name"],
                          "Object Name": r["Object Name"],
                          "Object Type": r["Object Type"],
-                         "Referenced Object": dependency[5],
-                         "Referenced Table": dependency[4],
-                         "Referenced Object Type": dependency[6],
+                         "Referenced Object": dependency.iloc[5],
+                         "Referenced Table": dependency.iloc[4],
+                         "Referenced Object Type": dependency.iloc[
+                             6
+                         ],
                          "Done": d,
                          "Full Object Name": r["Full Object Name"],
-                         "Referenced Full Object Name": dependency[
+                         "Referenced Full Object Name": dependency.iloc[
                              7
                          ],
                          "Parent Node": rObj,
sempy_labs/_notebooks.py CHANGED
@@ -8,13 +8,14 @@ from sempy_labs._helper_functions import (
      resolve_workspace_name_and_id,
      lro,
      _decode_b64,
+     resolve_notebook_id,
  )
  from sempy.fabric.exceptions import FabricHTTPException


  def get_notebook_definition(
-     notebook_name: str, workspace: Optional[str] = None, decode: Optional[bool] = True
- ):
+     notebook_name: str, workspace: Optional[str] = None, decode: bool = True
+ ) -> str:
      """
      Obtains the notebook definition.

@@ -32,21 +33,12 @@

      Returns
      -------
-     ipynb
+     str
          The notebook definition.
      """

      (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-     dfI = fabric.list_items(workspace=workspace, type="Notebook")
-     dfI_filt = dfI[dfI["Display Name"] == notebook_name]
-
-     if len(dfI_filt) == 0:
-         raise ValueError(
-             f"{icons.red_dot} The '{notebook_name}' notebook does not exist within the '{workspace}' workspace."
-         )
-
-     notebook_id = dfI_filt["Id"].iloc[0]
+     notebook_id = resolve_notebook_id(notebook=notebook_name, workspace=workspace)
      client = fabric.FabricRestClient()
      response = client.post(
          f"v1/workspaces/{workspace_id}/notebooks/{notebook_id}/getDefinition",
@@ -90,9 +82,6 @@
          The name of the workspace.
          Defaults to None which resolves to the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the workspace of the notebook.
-
-     Returns
-     -------
      """

      (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
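
get_notebook_definition now delegates the name-to-id lookup to the shared resolve_notebook_id helper and is annotated to return str. A usage sketch, assuming the package-level re-export; the notebook name is a placeholder:

    from sempy_labs import get_notebook_definition

    # Returns the notebook definition as a string, base64-decoded by default.
    nb_def = get_notebook_definition(notebook_name="My Notebook")

    # Pass decode=False to receive the raw base64 payload instead.
    nb_raw = get_notebook_definition(notebook_name="My Notebook", decode=False)
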
sempy_labs/_query_scale_out.py CHANGED
@@ -181,8 +181,8 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:

  def set_qso(
      dataset: str,
-     auto_sync: Optional[bool] = True,
-     max_read_only_replicas: Optional[int] = -1,
+     auto_sync: bool = True,
+     max_read_only_replicas: int = -1,
      workspace: Optional[str] = None,
  ) -> pd.DataFrame:
      """
@@ -339,7 +339,6 @@
      if dataset is not None:
          dataset_id = resolve_dataset_id(dataset, workspace)

-     workspace_id = fabric.get_workspace_id()
      df = pd.DataFrame(
          columns=[
              "Dataset Id",
@@ -413,10 +412,12 @@
      dfW = fabric.list_workspaces(filter=f"name eq '{workspace}'")
      if len(dfW) == 0:
          raise ValueError()
-     current_storage_format = dfW['Default Dataset Storage Format'].iloc[0]
+     current_storage_format = dfW["Default Dataset Storage Format"].iloc[0]

      if current_storage_format == storage_format:
-         print(f"{icons.info} The '{workspace}' is already set to a default storage format of '{current_storage_format}'.")
+         print(
+             f"{icons.info} The '{workspace}' is already set to a default storage format of '{current_storage_format}'."
+         )
          return

      request_body = {
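
set_qso's keyword defaults are now plain bool and int rather than Optional. A query scale-out sketch; "Sales" is a placeholder dataset name:

    from sempy_labs import set_qso, list_qso_settings

    # Enable QSO auto-sync with the automatic read-only replica count (-1).
    set_qso(dataset="Sales", auto_sync=True, max_read_only_replicas=-1)

    # Inspect the resulting settings.
    print(list_qso_settings(dataset="Sales"))
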
sempy_labs/_refresh_semantic_model.py CHANGED
@@ -13,10 +13,10 @@
      dataset: str,
      tables: Optional[Union[str, List[str]]] = None,
      partitions: Optional[Union[str, List[str]]] = None,
-     refresh_type: Optional[str] = None,
-     retry_count: Optional[int] = 0,
-     apply_refresh_policy: Optional[bool] = True,
-     max_parallelism: Optional[int] = 10,
+     refresh_type: str = "full",
+     retry_count: int = 0,
+     apply_refresh_policy: bool = True,
+     max_parallelism: int = 10,
      workspace: Optional[str] = None,
  ):
      """
@@ -30,7 +30,7 @@
          A string or a list of tables to refresh.
      partitions: str, List[str], default=None
          A string or a list of partitions to refresh. Partitions must be formatted as such: 'Table Name'[Partition Name].
-     refresh_type : str, default='full'
+     refresh_type : str, default="full"
          The type of processing to perform. Types align with the TMSL refresh command types: full, clearValues, calculate, dataOnly, automatic, and defragment. The add type isn't supported. Defaults to "full".
      retry_count : int, default=0
          Number of times the operation retries before failing.
@@ -48,9 +48,6 @@

      workspace = fabric.resolve_workspace_name(workspace)

-     if refresh_type is None:
-         refresh_type = "full"
-
      if isinstance(tables, str):
          tables = [tables]
      if isinstance(partitions, str):
@@ -74,18 +71,9 @@
          refresh_type.lower().replace("only", "Only").replace("values", "Values")
      )

-     refreshTypes = [
-         "full",
-         "automatic",
-         "dataOnly",
-         "calculate",
-         "clearValues",
-         "defragment",
-     ]
-
-     if refresh_type not in refreshTypes:
+     if refresh_type not in icons.refreshTypes:
          raise ValueError(
-             f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {refreshTypes}."
+             f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {icons.refreshTypes}."
          )

      if len(objects) == 0:
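
refresh_semantic_model now encodes the "full" default directly in the signature and validates refresh_type against the shared icons.refreshTypes list instead of a local copy. A usage sketch; "Sales" is a placeholder dataset name:

    from sempy_labs import refresh_semantic_model

    # refresh_type defaults to "full" straight from the signature.
    refresh_semantic_model(dataset="Sales")

    # Other TMSL refresh types are validated against icons.refreshTypes
    # and raise a ValueError when misspelled.
    refresh_semantic_model(dataset="Sales", refresh_type="calculate", max_parallelism=4)
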