semantic-link-labs 0.9.1__py3-none-any.whl → 0.9.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (87)
  1. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/METADATA +67 -8
  2. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/RECORD +87 -80
  3. sempy_labs/__init__.py +14 -12
  4. sempy_labs/_ai.py +8 -5
  5. sempy_labs/_capacities.py +120 -142
  6. sempy_labs/_capacity_migration.py +61 -94
  7. sempy_labs/_clear_cache.py +9 -8
  8. sempy_labs/_connections.py +107 -104
  9. sempy_labs/_data_pipelines.py +47 -49
  10. sempy_labs/_dataflows.py +45 -51
  11. sempy_labs/_dax.py +228 -6
  12. sempy_labs/_delta_analyzer.py +321 -0
  13. sempy_labs/_deployment_pipelines.py +72 -66
  14. sempy_labs/_environments.py +39 -36
  15. sempy_labs/_eventhouses.py +35 -35
  16. sempy_labs/_eventstreams.py +38 -39
  17. sempy_labs/_external_data_shares.py +29 -42
  18. sempy_labs/_gateways.py +103 -99
  19. sempy_labs/_generate_semantic_model.py +22 -30
  20. sempy_labs/_git.py +46 -66
  21. sempy_labs/_graphQL.py +95 -0
  22. sempy_labs/_helper_functions.py +227 -36
  23. sempy_labs/_job_scheduler.py +47 -59
  24. sempy_labs/_kql_databases.py +27 -34
  25. sempy_labs/_kql_querysets.py +23 -30
  26. sempy_labs/_list_functions.py +264 -167
  27. sempy_labs/_managed_private_endpoints.py +52 -47
  28. sempy_labs/_mirrored_databases.py +110 -134
  29. sempy_labs/_mirrored_warehouses.py +13 -13
  30. sempy_labs/_ml_experiments.py +36 -36
  31. sempy_labs/_ml_models.py +37 -38
  32. sempy_labs/_model_bpa.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +8 -6
  34. sempy_labs/_model_dependencies.py +2 -0
  35. sempy_labs/_notebooks.py +28 -29
  36. sempy_labs/_one_lake_integration.py +2 -0
  37. sempy_labs/_query_scale_out.py +63 -81
  38. sempy_labs/_refresh_semantic_model.py +12 -14
  39. sempy_labs/_spark.py +54 -79
  40. sempy_labs/_sql.py +7 -11
  41. sempy_labs/_translations.py +2 -2
  42. sempy_labs/_vertipaq.py +11 -6
  43. sempy_labs/_warehouses.py +30 -33
  44. sempy_labs/_workloads.py +15 -20
  45. sempy_labs/_workspace_identity.py +13 -17
  46. sempy_labs/_workspaces.py +49 -48
  47. sempy_labs/admin/__init__.py +2 -0
  48. sempy_labs/admin/_basic_functions.py +244 -281
  49. sempy_labs/admin/_domains.py +186 -103
  50. sempy_labs/admin/_external_data_share.py +26 -31
  51. sempy_labs/admin/_git.py +17 -22
  52. sempy_labs/admin/_items.py +34 -48
  53. sempy_labs/admin/_scanner.py +61 -49
  54. sempy_labs/directlake/_directlake_schema_compare.py +2 -0
  55. sempy_labs/directlake/_dl_helper.py +10 -11
  56. sempy_labs/directlake/_generate_shared_expression.py +4 -5
  57. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
  58. sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
  59. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
  60. sempy_labs/directlake/_warm_cache.py +2 -0
  61. sempy_labs/graph/__init__.py +33 -0
  62. sempy_labs/graph/_groups.py +402 -0
  63. sempy_labs/graph/_teams.py +113 -0
  64. sempy_labs/graph/_users.py +191 -0
  65. sempy_labs/lakehouse/__init__.py +4 -0
  66. sempy_labs/lakehouse/_get_lakehouse_columns.py +12 -12
  67. sempy_labs/lakehouse/_get_lakehouse_tables.py +16 -22
  68. sempy_labs/lakehouse/_lakehouse.py +104 -7
  69. sempy_labs/lakehouse/_shortcuts.py +42 -20
  70. sempy_labs/migration/__init__.py +4 -0
  71. sempy_labs/migration/_direct_lake_to_import.py +66 -0
  72. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  73. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
  74. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
  75. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
  76. sempy_labs/migration/_refresh_calc_tables.py +2 -2
  77. sempy_labs/report/_download_report.py +8 -13
  78. sempy_labs/report/_generate_report.py +49 -46
  79. sempy_labs/report/_paginated.py +20 -26
  80. sempy_labs/report/_report_functions.py +52 -47
  81. sempy_labs/report/_report_list_functions.py +2 -0
  82. sempy_labs/report/_report_rebind.py +6 -10
  83. sempy_labs/report/_reportwrapper.py +187 -220
  84. sempy_labs/tom/_model.py +12 -6
  85. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/LICENSE +0 -0
  86. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/WHEEL +0 -0
  87. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/top_level.txt +0 -0
sempy_labs/_ml_experiments.py CHANGED
@@ -1,13 +1,13 @@
 import sempy.fabric as fabric
 import pandas as pd
-import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    lro,
-    pagination,
+    _base_api,
+    _print_success,
+    resolve_item_id,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID
@@ -30,16 +30,20 @@ def list_ml_experiments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     A pandas dataframe showing the ML models within a workspace.
     """

-    df = pd.DataFrame(columns=["ML Experiment Name", "ML Experiment Id", "Description"])
+    columns = {
+        "ML Experiment Name": "string",
+        "ML Experiment Id": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/mlExperiments")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mlExperiments",
+        status_codes=200,
+        uses_pagination=True,
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -79,20 +83,23 @@ def create_ml_experiment(

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    request_body = {"displayName": name}
+    payload = {"displayName": name}

     if description:
-        request_body["description"] = description
-
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/mlExperiments", json=request_body
+        payload["description"] = description
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mlExperiments",
+        method="post",
+        payload=payload,
+        status_codes=[201, 202],
+        lro_return_status_code=True,
     )
-
-    lro(client, response, status_codes=[201, 202])
-
-    print(
-        f"{icons.green_dot} The '{name}' ML experiment has been created within the '{workspace_name}' workspace."
+    _print_success(
+        item_name=name,
+        item_type="ML experiment",
+        workspace_name=workspace_name,
+        action="created",
     )
@@ -112,18 +119,11 @@ def delete_ml_experiment(name: str, workspace: Optional[str | UUID] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    item_id = fabric.resolve_item_id(
-        item_name=name, type="MLExperiment", workspace=workspace_id
-    )
-
-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/workspaces/{workspace_id}/mlExperiments/{item_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    print(
-        f"{icons.green_dot} The '{name}' ML experiment within the '{workspace_name}' workspace has been deleted."
+    item_id = resolve_item_id(item=name, type="MLExperiment", workspace=workspace)
+    fabric.delete_item(item_id=item_id, workspace=workspace)
+    _print_success(
+        item_name=name,
+        item_type="ML Experiment",
+        workspace_name=workspace,
+        action="deleted",
     )
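This file shows the refactor repeated across most of the 87 files above: each hand-rolled FabricRestClient block (issue request, check status code, raise FabricHTTPException, then pagination/lro handling) collapses into a single _base_api call, and ad-hoc print statements become _print_success. The helper itself is internal to sempy_labs/_helper_functions.py (+227 -36 above); the sketch below is inferred from the call sites in this diff, not the actual implementation, and omits the LRO polling and the Power BI (/v1.0/myorg) client selection that the real helper must also handle.

import sempy.fabric as fabric
from sempy.fabric.exceptions import FabricHTTPException


def _base_api(request, method="get", payload=None, status_codes=200, uses_pagination=False):
    # Sketch only: approximate behavior inferred from call sites in this diff.
    if isinstance(status_codes, int):
        status_codes = [status_codes]
    client = fabric.FabricRestClient()
    response = getattr(client, method)(request, json=payload)
    if status_codes is not None and response.status_code not in status_codes:
        raise FabricHTTPException(response)
    if uses_pagination:
        # Fabric list endpoints page results via a continuationUri field;
        # follow the chain and return the list of JSON pages.
        pages = [response.json()]
        while pages[-1].get("continuationUri"):
            pages.append(client.get(pages[-1]["continuationUri"]).json())
        return pages
    return response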
sempy_labs/_ml_models.py CHANGED
@@ -1,13 +1,13 @@
 import sempy.fabric as fabric
 import pandas as pd
-import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    lro,
-    pagination,
+    _base_api,
+    resolve_item_id,
+    _print_success,
+    _create_dataframe,
 )
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID
@@ -30,16 +30,20 @@ def list_ml_models(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     A pandas dataframe showing the ML models within a workspace.
     """

-    df = pd.DataFrame(columns=["ML Model Name", "ML Model Id", "Description"])
+    columns = {
+        "ML Model Name": "string",
+        "ML Model Id": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/mlModels")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mlModels",
+        status_codes=200,
+        uses_pagination=True,
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -79,22 +83,27 @@ def create_ml_model(

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-    request_body = {"displayName": name}
+    payload = {"displayName": name}

     if description:
-        request_body["description"] = description
-
-    client = fabric.FabricRestClient()
-    response = client.post(f"/v1/workspaces/{workspace_id}/mlModels", json=request_body)
-
-    lro(client, response, status_codes=[201, 202])
-
-    print(
-        f"{icons.green_dot} The '{name}' ML model has been created within the '{workspace_name}' workspace."
+        payload["description"] = description
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mlModels",
+        method="post",
+        status_codes=[201, 202],
+        payload=payload,
+        lro_return_status_code=True,
+    )
+    _print_success(
+        item_name=name,
+        item_type="ML Model",
+        workspace_name=workspace_name,
+        action="created",
     )


-def delete_ml_model(name: str, workspace: Optional[str | UUID] = None):
+def delete_ml_model(name: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric ML model.
@@ -102,26 +111,16 @@ def delete_ml_model(name: str, workspace: Optional[str | UUID] = None):

     Parameters
     ----------
-    name: str
-        Name of the ML model.
+    name: str | uuid.UUID
+        Name or ID of the ML model.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    item_id = fabric.resolve_item_id(
-        item_name=name, type="MLModel", workspace=workspace
-    )
-
-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/workspaces/{workspace_id}/mlModels/{item_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    print(
-        f"{icons.green_dot} The '{name}' ML model within the '{workspace_name}' workspace has been deleted."
+    item_id = resolve_item_id(item=name, type="MLModel", workspace=workspace)
+    fabric.delete_item(item_id=item_id, workspace=workspace)
+    _print_success(
+        item_name=name, item_type="ML Model", workspace_name=workspace, action="deleted"
     )
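The delete path in both files now resolves the item ID through the shared resolve_item_id helper and delegates to sempy's fabric.delete_item instead of issuing a raw DELETE. With the widened signature, callers can pass either a display name or an ID. A usage sketch (item and workspace names are hypothetical, and it assumes delete_ml_model remains exported at package level as in prior releases):

from sempy_labs import delete_ml_model

# By display name (hypothetical names):
delete_ml_model(name="Churn Model", workspace="Sales Analytics")

# By item ID, now allowed by the `name: str | UUID` signature (placeholder UUID):
delete_ml_model(name="00000000-0000-0000-0000-000000000000", workspace="Sales Analytics")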
sempy_labs/_model_bpa.py CHANGED
@@ -14,6 +14,7 @@ from sempy_labs._helper_functions import (
     get_language_codes,
     _get_column_aggregate,
     resolve_workspace_name_and_id,
+    _create_spark_session,
 )
 from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
 from sempy_labs.tom import connect_semantic_model
@@ -181,7 +182,6 @@ def run_model_bpa(
     def translate_using_spark(rule_file):

         from synapse.ml.services import Translate
-        from pyspark.sql import SparkSession

         rules_temp = rule_file.copy()
         rules_temp = rules_temp.drop(["Expression", "URL", "Severity"], axis=1)
@@ -195,7 +195,7 @@ def run_model_bpa(
             ]
         )

-        spark = SparkSession.builder.getOrCreate()
+        spark = _create_spark_session()
         dfRules = spark.createDataFrame(rules_temp, schema)

         columns = ["Category", "Rule Name", "Description"]
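The only behavioral change in _model_bpa.py is routing Spark session creation through a shared helper so the pyspark import lives in one place. Given the one-for-one replacement of SparkSession.builder.getOrCreate(), the helper is presumably close to this sketch (the real one lives in _helper_functions.py and may add checks for a missing Spark environment):

from pyspark.sql import SparkSession


def _create_spark_session() -> SparkSession:
    # Single choke point for session creation; callers no longer import pyspark.
    return SparkSession.builder.getOrCreate()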
sempy_labs/_model_bpa_rules.py CHANGED
@@ -416,7 +416,7 @@ def model_bpa_rules(
         lambda obj, tom: any(
             re.search(
                 r"USERELATIONSHIP\s*\(\s*.+?(?=])\]\s*,\s*'*"
-                + obj.Name
+                + re.escape(obj.Name)
                 + r"'*\[",
                 m.Expression,
                 flags=re.IGNORECASE,
@@ -455,7 +455,9 @@ def model_bpa_rules(
         "Warning",
         "The EVALUATEANDLOG function should not be used in production models",
         lambda obj, tom: re.search(
-            r"evaluateandlog\s*\(", obj.Expression, flags=re.IGNORECASE
+            r"evaluateandlog\s*\(",
+            obj.Expression,
+            flags=re.IGNORECASE,
         ),
         "The EVALUATEANDLOG function is meant to be used only in development/test environments and should not be used in production models.",
         "https://pbidax.wordpress.com/2022/08/16/introduce-the-dax-evaluateandlog-function",
@@ -592,13 +594,13 @@ def model_bpa_rules(
         and not any(
             re.search(
                 r"USERELATIONSHIP\s*\(\s*\'*"
-                + obj.FromTable.Name
+                + re.escape(obj.FromTable.Name)
                 + r"'*\["
-                + obj.FromColumn.Name
+                + re.escape(obj.FromColumn.Name)
                 + r"\]\s*,\s*'*"
-                + obj.ToTable.Name
+                + re.escape(obj.ToTable.Name)
                 + r"'*\["
-                + obj.ToColumn.Name
+                + re.escape(obj.ToColumn.Name)
                 + r"\]",
                 m.Expression,
                 flags=re.IGNORECASE,
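All of the _model_bpa_rules.py edits fix the same defect class: model object names were concatenated into regex patterns verbatim, so a table or column name containing metacharacters silently broke the match (or raised re.error for unbalanced brackets). A self-contained repro with a hypothetical table name, using a simplified version of the rule's pattern:

import re

name = "Sales (2024)"  # hypothetical table name containing metacharacters
expr = "CALCULATE([x], USERELATIONSHIP('Sales (2024)'[Date], 'Calendar'[Date]))"

# Unescaped: "(2024)" is parsed as a regex group, so the literal text never matches.
print(re.search(r"USERELATIONSHIP\s*\(\s*'*" + name + r"'*\[", expr, re.IGNORECASE))  # None
# Escaped: the name is matched literally, as the rule intends.
print(re.search(r"USERELATIONSHIP\s*\(\s*'*" + re.escape(name) + r"'*\[", expr, re.IGNORECASE))  # <re.Match ...>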
sempy_labs/_model_dependencies.py CHANGED
@@ -287,6 +287,8 @@ def measure_dependency_tree(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

+    fabric.refresh_tom_cache(workspace=workspace)
+
     dfM = fabric.list_measures(dataset=dataset_id, workspace=workspace_id)
     dfM_filt = dfM[dfM["Measure Name"] == measure_name]
sempy_labs/_notebooks.py CHANGED
@@ -7,8 +7,9 @@ import requests
 from sempy._utils._log import log
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
-    lro,
     _decode_b64,
+    _base_api,
+    resolve_item_id,
 )
 from sempy.fabric.exceptions import FabricHTTPException
 import os
@@ -22,15 +23,13 @@ def _get_notebook_definition_base(
 ) -> pd.DataFrame:

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = fabric.resolve_item_id(
-        item_name=notebook_name, type="Notebook", workspace=workspace_id
+    item_id = resolve_item_id(item=notebook_name, type="Notebook", workspace=workspace)
+    result = _base_api(
+        request=f"v1/workspaces/{workspace_id}/notebooks/{item_id}/getDefinition",
+        method="post",
+        lro_return_json=True,
+        status_codes=None,
     )
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"v1/workspaces/{workspace_id}/notebooks/{item_id}/getDefinition",
-    )
-
-    result = lro(client, response).json()

     return pd.json_normalize(result["definition"]["parts"])

@@ -185,10 +184,9 @@ def create_notebook(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    client = fabric.FabricRestClient()
-    notebook_payload = base64.b64encode(notebook_content)
+    notebook_payload = base64.b64encode(notebook_content).decode("utf-8")

-    request_body = {
+    payload = {
         "displayName": name,
         "definition": {
             "format": "ipynb",
@@ -202,11 +200,15 @@ def create_notebook(
         },
     }
     if description is not None:
-        request_body["description"] = description
-
-    response = client.post(f"v1/workspaces/{workspace_id}/notebooks", json=request_body)
-
-    lro(client, response, status_codes=[201, 202])
+        payload["description"] = description
+
+    _base_api(
+        request=f"v1/workspaces/{workspace_id}/notebooks",
+        payload=payload,
+        method="post",
+        lro_return_status_code=True,
+        status_codes=[201, 202],
+    )

     print(
         f"{icons.green_dot} The '{name}' notebook was created within the '{workspace_name}' workspace."
@@ -232,15 +234,11 @@ def update_notebook_definition(
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    client = fabric.FabricRestClient()
     notebook_payload = base64.b64encode(notebook_content)
-    notebook_id = fabric.resolve_item_id(
-        item_name=name, type="Notebook", workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=name, type="Notebook", workspace=workspace)
+    type = _get_notebook_type(notebook_name=name, workspace=workspace)

-    type = _get_notebook_type(notebook_name=name, workspace=workspace_id)
-
-    request_body = {
+    payload = {
         "definition": {
             "parts": [
                 {
@@ -252,13 +250,14 @@ def update_notebook_definition(
         },
     }

-    response = client.post(
-        f"v1/workspaces/{workspace_id}/notebooks/{notebook_id}/updateDefinition",
-        json=request_body,
+    _base_api(
+        request=f"v1/workspaces/{workspace_id}/notebooks/{item_id}/updateDefinition",
+        payload=payload,
+        method="post",
+        lro_return_status_code=True,
+        status_codes=None,
     )

-    lro(client, response, return_status_code=True)
-
     print(
         f"{icons.green_dot} The '{name}' notebook was updated within the '{workspace_name}' workspace."
     )
sempy_labs/_one_lake_integration.py CHANGED
@@ -37,6 +37,8 @@ def export_model_to_onelake(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

+    fabric.refresh_tom_cache(workspace=workspace)
+
     if destination_workspace is None:
         destination_workspace = workspace_name
         destination_workspace_id = workspace_id
sempy_labs/_query_scale_out.py CHANGED
@@ -3,11 +3,13 @@ import pandas as pd
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
+    _update_dataframe_datatypes,
+    _base_api,
+    _create_dataframe,
 )
 from sempy._utils._log import log
 from typing import Optional, Tuple
 import sempy_labs._icons as icons
-from sempy.fabric.exceptions import FabricHTTPException
 from uuid import UUID
@@ -31,13 +33,10 @@ def qso_sync(dataset: str | UUID, workspace: Optional[str | UUID] = None):
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    client = fabric.PowerBIRestClient()
-    response = client.post(
-        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/sync"
+    _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/sync",
+        method="post",
     )
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
     print(
         f"{icons.green_dot} QSO sync initiated for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
     )
@@ -67,35 +66,36 @@ def qso_sync_status(
     2 pandas dataframes showing the query scale-out sync status.
     """

-    df = pd.DataFrame(
-        columns=[
-            "Scale Out Status",
-            "Sync Start Time",
-            "Sync End Time",
-            "Commit Version",
-            "Commit Timestamp",
-            "Target Sync Version",
-            "Target Sync Timestamp",
-            "Trigger Reason",
-            "Min Active Read Version",
-            "Min Active Read Timestamp",
-        ]
-    )
-    dfRep = pd.DataFrame(
-        columns=["Replica ID", "Replica Type", "Replica Version", "Replica Timestamp"]
-    )
+    columns = {
+        "Scale Out Status": "string",
+        "Sync Start Time": "datetime",
+        "Sync End Time": "datetime",
+        "Commit Version": "int",
+        "Commit Timestamp": "datetime",
+        "Target Sync Version": "int",
+        "Target Sync Timestamp": "datetime",
+        "Trigger Reason": "string",
+        "Min Active Read Version": "int",
+        "Min Active Read Timestamp": "datetime",
+    }
+    df = _create_dataframe(columns=columns)
+
+    columns_rep = {
+        "Replica ID": "string",
+        "Replica Type": "string",
+        "Replica Version": "string",
+        "Replica Timestamp": "datetime",
+    }
+
+    dfRep = _create_dataframe(columns=columns_rep)

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    client = fabric.PowerBIRestClient()
-    response = client.get(
-        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/syncStatus"
+    response = _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/queryScaleOut/syncStatus"
     )

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
     o = response.json()
     sos = o.get("scaleOutStatus")
@@ -125,17 +125,8 @@ def qso_sync_status(
             [dfRep, pd.DataFrame(new_data, index=[0])], ignore_index=True
         )

-        df["Sync Start Time"] = pd.to_datetime(df["Sync Start Time"])
-        df["Sync End Time"] = pd.to_datetime(df["Sync End Time"])
-        df["Commit Timestamp"] = pd.to_datetime(df["Commit Timestamp"])
-        df["Target Sync Timestamp"] = pd.to_datetime(df["Target Sync Timestamp"])
-        df["Min Active Read Timestamp"] = pd.to_datetime(
-            df["Min Active Read Timestamp"]
-        )
-        dfRep["Replica Timestamp"] = pd.to_datetime(dfRep["Replica Timestamp"])
-        df["Commit Version"] = df["Commit Version"].astype("int")
-        df["Target Sync Version"] = df["Target Sync Version"].astype("int")
-        df["Min Active Read Version"] = df["Min Active Read Version"].astype("int")
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+        _update_dataframe_datatypes(dataframe=dfRep, column_map=columns_rep)

         return df, dfRep
     else:
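The columns-dict pattern used throughout this release replaces the per-column pd.to_datetime/astype calls deleted above: the dict declares the schema once, _create_dataframe builds the empty frame from its keys, and _update_dataframe_datatypes coerces each populated column to its declared type. A sketch consistent with the removed code, not the actual _helper_functions.py implementation:

import pandas as pd


def _create_dataframe(columns: dict) -> pd.DataFrame:
    # Keys give column names and order; values declare target dtypes.
    return pd.DataFrame(columns=list(columns.keys()))


def _update_dataframe_datatypes(dataframe: pd.DataFrame, column_map: dict) -> None:
    # Coerce in place once rows exist; int/bool casts fail on missing values,
    # which is why callers invoke this only after populating the frame.
    for col, kind in column_map.items():
        if kind == "datetime":
            dataframe[col] = pd.to_datetime(dataframe[col])
        elif kind in ("int", "bool"):
            dataframe[col] = dataframe[col].astype(kind)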
@@ -170,14 +161,13 @@
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    request_body = {"queryScaleOutSettings": {"maxReadOnlyReplicas": "0"}}
+    payload = {"queryScaleOutSettings": {"maxReadOnlyReplicas": "0"}}

-    client = fabric.PowerBIRestClient()
-    response = client.patch(
-        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
+    _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
+        method="patch",
+        payload=payload,
     )
-    if response.status_code != 200:
-        raise FabricHTTPException(response)

     df = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
@@ -233,7 +223,7 @@
         disable_qso(dataset=dataset_id, workspace=workspace_id)
         return

-    request_body = {
+    payload = {
         "queryScaleOutSettings": {
             "autoSyncReadOnlyReplicas": auto_sync,
             "maxReadOnlyReplicas": max_read_only_replicas,
@@ -248,13 +238,11 @@
             dataset=dataset_id, storage_format="Large", workspace=workspace_id
         )

-    client = fabric.PowerBIRestClient()
-    response = client.patch(
-        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
-        json=request_body,
+    _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
+        method="patch",
+        payload=payload,
     )
-    if response.status_code != 200:
-        raise FabricHTTPException(response)

     df = list_qso_settings(dataset=dataset_id, workspace=workspace_id)
     print(
@@ -296,9 +284,9 @@ def set_semantic_model_storage_format(
     storageFormats = ["Small", "Large"]

     if storage_format == "Large":
-        request_body = {"targetStorageMode": "PremiumFiles"}
+        payload = {"targetStorageMode": "PremiumFiles"}
     elif storage_format == "Small":
-        request_body = {"targetStorageMode": "Abf"}
+        payload = {"targetStorageMode": "Abf"}
     else:
         raise ValueError(
             f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}."
@@ -313,12 +301,11 @@
         )
         return

-    client = fabric.PowerBIRestClient()
-    response = client.patch(
-        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
+    _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}",
+        method="patch",
+        payload=payload,
     )
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
     print(
         f"{icons.green_dot} The semantic model storage format for the '{dataset_name}' semantic model within the '{workspace_name}' workspace has been set to '{storage_format}'."
     )
@@ -351,17 +338,16 @@ def list_qso_settings(
     if dataset is not None:
         (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    df = pd.DataFrame(
-        columns=[
-            "Dataset Id",
-            "Dataset Name",
-            "Storage Mode",
-            "QSO Auto Sync Enabled",
-            "QSO Max Read Only Replicas",
-        ]
-    )
-    client = fabric.PowerBIRestClient()
-    response = client.get(f"/v1.0/myorg/groups/{workspace_id}/datasets")
+    columns = {
+        "Dataset Id": "string",
+        "Dataset Name": "string",
+        "Storage Mode": "string",
+        "QSO Auto Sync Enabled": "bool",
+        "QSO Max Read Only Replicas": "int",
+    }
+    df = _create_dataframe(columns=columns)
+
+    response = _base_api(request=f"/v1.0/myorg/groups/{workspace_id}/datasets")

     for v in response.json().get("value", []):
         tsm = v.get("targetStorageMode")
@@ -382,8 +368,7 @@
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

-    df["QSO Auto Sync Enabled"] = df["QSO Auto Sync Enabled"].astype("bool")
-    df["QSO Max Read Only Replicas"] = df["QSO Max Read Only Replicas"].astype("int")
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)

     if dataset is not None:
         df = df[df["Dataset Id"] == dataset_id]
@@ -411,7 +396,6 @@
     # https://learn.microsoft.com/en-us/rest/api/power-bi/groups/update-group#defaultdatasetstorageformat

     storageFormats = ["Small", "Large"]
-
     storage_format = storage_format.capitalize()

     if storage_format not in storageFormats:
@@ -433,16 +417,14 @@
         )
         return

-    request_body = {
+    payload = {
         "name": workspace_name,
         "defaultDatasetStorageFormat": storage_format,
     }

-    client = fabric.PowerBIRestClient()
-    response = client.patch(f"/v1.0/myorg/groups/{workspace_id}", json=request_body)
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
+    _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}", method="patch", payload=payload
+    )

     print(
         f"{icons.green_dot} The default storage format for the '{workspace_name}' workspace has been updated to '{storage_format}."