semantic-link-labs 0.8.3__py3-none-any.whl → 0.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic.

Files changed (101)
  1. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/METADATA +33 -8
  2. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/RECORD +101 -98
  3. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +24 -0
  5. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
  6. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
  9. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
  10. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
  11. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
  12. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
  13. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
  14. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
  16. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
  17. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
  18. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
  19. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
  20. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
  22. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
  23. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
  24. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
  27. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
  28. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
  29. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
  30. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
  32. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
  36. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
  37. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
  38. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
  41. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
  42. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
  43. sempy_labs/_capacities.py +49 -14
  44. sempy_labs/_capacity_migration.py +1 -7
  45. sempy_labs/_data_pipelines.py +6 -0
  46. sempy_labs/_dataflows.py +4 -0
  47. sempy_labs/_deployment_pipelines.py +13 -7
  48. sempy_labs/_environments.py +6 -0
  49. sempy_labs/_eventhouses.py +6 -0
  50. sempy_labs/_eventstreams.py +6 -0
  51. sempy_labs/_external_data_shares.py +6 -4
  52. sempy_labs/_generate_semantic_model.py +26 -3
  53. sempy_labs/_git.py +14 -14
  54. sempy_labs/_helper_functions.py +172 -0
  55. sempy_labs/_icons.py +55 -22
  56. sempy_labs/_kql_databases.py +6 -0
  57. sempy_labs/_kql_querysets.py +6 -0
  58. sempy_labs/_list_functions.py +1 -1
  59. sempy_labs/_managed_private_endpoints.py +166 -0
  60. sempy_labs/_mirrored_warehouses.py +2 -0
  61. sempy_labs/_ml_experiments.py +6 -0
  62. sempy_labs/_ml_models.py +6 -0
  63. sempy_labs/_model_bpa.py +6 -1
  64. sempy_labs/_model_bpa_bulk.py +11 -25
  65. sempy_labs/_model_bpa_rules.py +8 -3
  66. sempy_labs/_notebooks.py +107 -12
  67. sempy_labs/_query_scale_out.py +8 -6
  68. sempy_labs/_refresh_semantic_model.py +299 -49
  69. sempy_labs/_spark.py +12 -5
  70. sempy_labs/_translations.py +2 -0
  71. sempy_labs/_vertipaq.py +58 -67
  72. sempy_labs/_warehouses.py +79 -0
  73. sempy_labs/_workloads.py +128 -0
  74. sempy_labs/_workspace_identity.py +4 -4
  75. sempy_labs/_workspaces.py +14 -1
  76. sempy_labs/admin/_basic_functions.py +85 -43
  77. sempy_labs/admin/_domains.py +18 -18
  78. sempy_labs/directlake/__init__.py +2 -0
  79. sempy_labs/directlake/_directlake_schema_sync.py +2 -1
  80. sempy_labs/directlake/_dl_helper.py +4 -1
  81. sempy_labs/directlake/_get_shared_expression.py +7 -1
  82. sempy_labs/directlake/_guardrails.py +2 -1
  83. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -7
  84. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
  85. sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
  86. sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
  87. sempy_labs/lakehouse/_shortcuts.py +4 -0
  88. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  89. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
  90. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
  91. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
  92. sempy_labs/migration/_migration_validation.py +2 -0
  93. sempy_labs/migration/_refresh_calc_tables.py +1 -0
  94. sempy_labs/report/__init__.py +4 -1
  95. sempy_labs/report/_generate_report.py +6 -0
  96. sempy_labs/report/_paginated.py +74 -0
  97. sempy_labs/report/_report_functions.py +6 -0
  98. sempy_labs/report/_report_rebind.py +2 -0
  99. sempy_labs/tom/_model.py +64 -33
  100. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/LICENSE +0 -0
  101. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.4.dist-info}/top_level.txt +0 -0
sempy_labs/_model_bpa_bulk.py CHANGED
@@ -59,24 +59,6 @@ def run_model_bpa_bulk(
 
  skip_models.extend(["ModelBPA", "Fabric Capacity Metrics"])
 
- cols = [
- "Capacity Name",
- "Capacity Id",
- "Workspace Name",
- "Workspace Id",
- "Dataset Name",
- "Dataset Id",
- "Configured By",
- "Rule Name",
- "Category",
- "Severity",
- "Object Type",
- "Object Name",
- "Description",
- "URL",
- "RunId",
- "Timestamp",
- ]
  now = datetime.datetime.now()
  output_table = "modelbparesults"
  lakehouse_workspace = fabric.resolve_workspace_name()
@@ -106,7 +88,7 @@ def run_model_bpa_bulk(
  wksp = r["Name"]
  wksp_id = r["Id"]
  capacity_id, capacity_name = resolve_workspace_capacity(workspace=wksp)
- df = pd.DataFrame(columns=cols)
+ df = pd.DataFrame(columns=list(icons.bpa_schema.keys()))
  dfD = fabric.list_datasets(workspace=wksp, mode="rest")
 
  # Exclude default semantic models
@@ -137,8 +119,8 @@ def run_model_bpa_bulk(
  rules=rules,
  extended=extended,
  )
- bpa_df["Capacity Id"] = capacity_id
  bpa_df["Capacity Name"] = capacity_name
+ bpa_df["Capacity Id"] = capacity_id
  bpa_df["Workspace Name"] = wksp
  bpa_df["Workspace Id"] = wksp_id
  bpa_df["Dataset Name"] = dataset_name
@@ -146,7 +128,7 @@ def run_model_bpa_bulk(
  bpa_df["Configured By"] = config_by
  bpa_df["Timestamp"] = now
  bpa_df["RunId"] = runId
- bpa_df = bpa_df[cols]
+ bpa_df = bpa_df[list(icons.bpa_schema.keys())]
 
  bpa_df["RunId"] = bpa_df["RunId"].astype("int")
 
@@ -166,10 +148,17 @@ def run_model_bpa_bulk(
  print(
  f"{icons.in_progress} Saving the Model BPA results of the '{wksp}' workspace to the '{output_table}' within the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace..."
  )
+
+ schema = {
+ key.replace(" ", "_"): value
+ for key, value in icons.bpa_schema.items()
+ }
+
  save_as_delta_table(
  dataframe=df,
  delta_table_name=output_table,
  write_mode="append",
+ schema=schema,
  merge_schema=True,
  )
  print(
@@ -203,9 +192,6 @@ def create_model_bpa_semantic_model(
  The workspace in which the lakehouse resides.
  Defaults to None which resolves to the workspace of the attached lakehouse
  or if no lakehouse attached, resolves to the workspace of the notebook.
-
- Returns
- -------
  """
 
  from sempy_labs._helper_functions import resolve_lakehouse_name
@@ -244,7 +230,7 @@ def create_model_bpa_semantic_model(
  tom.model
 
  dyn_connect()
-
+ icons.sll_tags.append("ModelBPABulk")
  table_exists = False
  with connect_semantic_model(
  dataset=dataset, readonly=False, workspace=lakehouse_workspace
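
The net effect of the hunks above is that the BPA result columns are now driven by a single icons.bpa_schema dictionary instead of the removed hard-coded cols list, and save_as_delta_table receives a schema whose keys have spaces replaced with underscores (presumably so the column names are valid for the Delta table). A minimal sketch of that pattern; the dictionary contents and the "string" value types below are assumptions for illustration, not the actual definition in sempy_labs/_icons.py:

# Assumed stand-in for icons.bpa_schema; only the column names shown in the
# removed cols list come from the diff, the value types are guesses.
bpa_schema = {
    "Capacity Name": "string",
    "Capacity Id": "string",
    "Workspace Name": "string",
    "Rule Name": "string",
    "Severity": "string",
    # ... remaining BPA result columns ("Dataset Name", "Object Type", "RunId", "Timestamp", ...)
}

# DataFrame columns keep the display names with spaces...
columns = list(bpa_schema.keys())

# ...while the schema passed to save_as_delta_table uses underscore-separated names.
schema = {key.replace(" ", "_"): value for key, value in bpa_schema.items()}
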
sempy_labs/_model_bpa_rules.py CHANGED
@@ -158,6 +158,7 @@ def model_bpa_rules(
  and r.ToTable.Name == obj.Name
  for r in tom.used_in_relationships(object=obj)
  ),
+ "When using DirectQuery, dimension tables should be set to Dual mode in order to improve query performance.",
  "https://learn.microsoft.com/power-bi/transform-model/desktop-storage-mode#propagation-of-the-dual-setting",
  ),
  (
@@ -492,6 +493,7 @@ def model_bpa_rules(
  obj.Expression,
  flags=re.IGNORECASE,
  ),
+ "Adding a constant value may lead to performance degradation.",
  ),
  (
  "DAX Expressions",
@@ -643,14 +645,15 @@ def model_bpa_rules(
  "Calculation groups with no calculation items",
  lambda obj, tom: obj.CalculationGroup is not None
  and not any(obj.CalculationGroup.CalculationItems),
+ "Calculation groups have no function unless they have calculation items.",
  ),
  (
  "Maintenance",
- "Column",
+ ["Column", "Measure", "Table"],
  "Info",
  "Visible objects with no description",
  lambda obj, tom: obj.IsHidden is False and len(obj.Description) == 0,
- "Calculation groups have no function unless they have calculation items.",
+ "Add descriptions to objects. These descriptions are shown on hover within the Field List in Power BI Desktop. Additionally, you can leverage these descriptions to create an automated data dictionary.",
  ),
  (
  "Formatting",
@@ -710,6 +713,7 @@ def model_bpa_rules(
  "Percentages should be formatted with thousands separators and 1 decimal",
  lambda obj, tom: "%" in obj.FormatString
  and obj.FormatString != "#,0.0%;-#,0.0%;#,0.0%",
+ "For a better user experience, percengage measures should be formatted with a '%' sign.",
  ),
  (
  "Formatting",
@@ -719,6 +723,7 @@ def model_bpa_rules(
  lambda obj, tom: "$" not in obj.FormatString
  and "%" not in obj.FormatString
  and obj.FormatString not in ["#,0", "#,0.0"],
+ "For a better user experience, whole numbers should be formatted with commas.",
  ),
  (
  "Formatting",
@@ -731,7 +736,7 @@ def model_bpa_rules(
  and r.FromCardinality == TOM.RelationshipEndCardinality.Many
  for r in tom.used_in_relationships(object=obj)
  ),
- "Foreign keys should always be hidden.",
+ "Foreign keys should always be hidden as they should not be used by end users.",
  ),
  (
  "Formatting",
sempy_labs/_notebooks.py CHANGED
@@ -18,6 +18,8 @@ def get_notebook_definition(
  """
  Obtains the notebook definition.
 
+ This is a wrapper function for the following API: `Items - Get Notebook Definition <https://learn.microsoft.com/rest/api/fabric/notebook/items/get-notebook-definition`_.
+
  Parameters
  ----------
  notebook_name : str
@@ -63,6 +65,7 @@ def import_notebook_from_web(
  url: str,
  description: Optional[str] = None,
  workspace: Optional[str] = None,
+ overwrite: bool = False,
  ):
  """
  Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web.
@@ -83,16 +86,12 @@ def import_notebook_from_web(
  The name of the workspace.
  Defaults to None which resolves to the workspace of the attached lakehouse
  or if no lakehouse attached, resolves to the workspace of the notebook.
+ overwrite : bool, default=False
+ If set to True, overwrites the existing notebook in the workspace if it exists.
  """
 
- (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
- client = fabric.FabricRestClient()
- dfI = fabric.list_items(workspace=workspace, type="Notebook")
- dfI_filt = dfI[dfI["Display Name"] == notebook_name]
- if len(dfI_filt) > 0:
- raise ValueError(
- f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace}' workspace."
- )
+ if workspace is None:
+ workspace = fabric.resolve_workspace_name(workspace)
 
  # Fix links to go to the raw github file
  starting_text = "https://github.com/"
@@ -105,11 +104,56 @@ def import_notebook_from_web(
  response = requests.get(url)
  if response.status_code != 200:
  raise FabricHTTPException(response)
- file_content = response.content
- notebook_payload = base64.b64encode(file_content)
+
+ dfI = fabric.list_items(workspace=workspace, type="Notebook")
+ dfI_filt = dfI[dfI["Display Name"] == notebook_name]
+ if len(dfI_filt) == 0:
+ create_notebook(
+ name=notebook_name,
+ notebook_content=response.content,
+ workspace=workspace,
+ description=description,
+ )
+ elif len(dfI_filt) > 0 and overwrite:
+ update_notebook_definition(
+ name=notebook_name, notebook_content=response.content, workspace=workspace
+ )
+ else:
+ raise ValueError(
+ f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace}' workspace and 'overwrite' is set to False."
+ )
+
+
+ def create_notebook(
+ name: str,
+ notebook_content: str,
+ description: Optional[str] = None,
+ workspace: Optional[str] = None,
+ ):
+ """
+ Creates a new notebook with a definition within a workspace.
+
+ Parameters
+ ----------
+ name : str
+ The name of the notebook to be created.
+ notebook_content : str
+ The Jupyter notebook content (not in Base64 format).
+ description : str, default=None
+ The description of the notebook.
+ Defaults to None which does not place a description.
+ workspace : str, default=None
+ The name of the workspace.
+ Defaults to None which resolves to the workspace of the attached lakehouse
+ or if no lakehouse attached, resolves to the workspace of the notebook.
+ """
+
+ (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+ client = fabric.FabricRestClient()
+ notebook_payload = base64.b64encode(notebook_content)
 
  request_body = {
- "displayName": notebook_name,
+ "displayName": name,
  "definition": {
  "format": "ipynb",
  "parts": [
@@ -129,5 +173,56 @@ def import_notebook_from_web(
  lro(client, response, status_codes=[201, 202])
 
  print(
- f"{icons.green_dot} The '{notebook_name}' notebook was created within the '{workspace}' workspace."
+ f"{icons.green_dot} The '{name}' notebook was created within the '{workspace}' workspace."
+ )
+
+
+ def update_notebook_definition(
+ name: str, notebook_content: str, workspace: Optional[str] = None
+ ):
+ """
+ Updates an existing notebook with a new definition.
+
+ Parameters
+ ----------
+ name : str
+ The name of the notebook to be created.
+ notebook_content : str
+ The Jupyter notebook content (not in Base64 format).
+ workspace : str, default=None
+ The name of the workspace.
+ Defaults to None which resolves to the workspace of the attached lakehouse
+ or if no lakehouse attached, resolves to the workspace of the notebook.
+ """
+
+ (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+ client = fabric.FabricRestClient()
+ notebook_payload = base64.b64encode(notebook_content)
+ notebook_id = fabric.resolve_item_id(
+ item_name=name, type="Notebook", workspace=workspace
+ )
+
+ request_body = {
+ "displayName": name,
+ "definition": {
+ "format": "ipynb",
+ "parts": [
+ {
+ "path": "notebook-content.py",
+ "payload": notebook_payload,
+ "payloadType": "InlineBase64",
+ }
+ ],
+ },
+ }
+
+ response = client.post(
+ f"v1/workspaces/{workspace_id}/notebooks/{notebook_id}/updateDefinition",
+ json=request_body,
+ )
+
+ lro(client, response, return_status_code=True)
+
+ print(
+ f"{icons.green_dot} The '{name}' notebook was updated within the '{workspace}' workspace."
  )
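
A usage sketch of the new overwrite behaviour; the notebook name, URL, and workspace below are placeholders, and this assumes import_notebook_from_web is re-exported at the sempy_labs package root as in earlier releases:

import sempy_labs as labs

labs.import_notebook_from_web(
    notebook_name="My Notebook",  # placeholder name
    url="https://github.com/<org>/<repo>/blob/main/My%20Notebook.ipynb",  # placeholder URL
    workspace="My Workspace",  # placeholder workspace
    overwrite=True,  # update the existing definition instead of raising ValueError
)

With overwrite left at its default of False, the 0.8.3 behaviour of raising when the notebook already exists is preserved.
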
sempy_labs/_query_scale_out.py CHANGED
@@ -13,6 +13,8 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
  """
  Triggers a query scale-out sync of read-only replicas for the specified dataset from the specified workspace.
 
+ This is a wrapper function for the following API: `Datasets - Trigger Query Scale Out Sync In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/trigger-query-scale-out-sync-in-group`_.
+
  Parameters
  ----------
  dataset : str
@@ -23,8 +25,6 @@ def qso_sync(dataset: str, workspace: Optional[str] = None):
  or if no lakehouse attached, resolves to the workspace of the notebook.
  """
 
- # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/trigger-query-scale-out-sync-in-group
-
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
  dataset_id = resolve_dataset_id(dataset, workspace)
 
@@ -46,6 +46,8 @@ def qso_sync_status(
  """
  Returns the query scale-out sync status for the specified dataset from the specified workspace.
 
+ This is a wrapper function for the following API: `Datasets - Get Query Scale Out Sync Status In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/get-query-scale-out-sync-status-in-group`_.
+
  Parameters
  ----------
  dataset : str
@@ -61,8 +63,6 @@ def qso_sync_status(
  2 pandas dataframes showing the query scale-out sync status.
  """
 
- # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/get-query-scale-out-sync-status-in-group
-
  df = pd.DataFrame(
  columns=[
  "Scale Out Status",
@@ -143,6 +143,8 @@ def disable_qso(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
  """
  Sets the max read-only replicas to 0, disabling query scale out.
 
+ This is a wrapper function for the following API: `Datasets - Update Dataset In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/update-dataset-in-group`_.
+
  Parameters
  ----------
  dataset : str
@@ -188,6 +190,8 @@ def set_qso(
  """
  Sets the query scale out settings for a semantic model.
 
+ This is a wrapper function for the following API: `Datasets - Update Dataset In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/update-dataset-in-group`_.
+
  Parameters
  ----------
  dataset : str
@@ -209,8 +213,6 @@ def set_qso(
 
  from sempy_labs._helper_functions import is_default_semantic_model
 
- # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/update-dataset-in-group
-
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
  dataset_id = resolve_dataset_id(dataset, workspace)
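
The query scale-out hunks only move the REST API references from inline comments into the docstrings, so runtime behaviour is unchanged. A brief usage sketch based on the signatures visible in the hunk headers; the dataset and workspace names are placeholders and the functions are assumed to be re-exported at the package root:

import sempy_labs as labs

labs.qso_sync(dataset="Sales Model", workspace="My Workspace")

# The qso_sync_status docstring above documents two returned DataFrames.
sync_df, replica_df = labs.qso_sync_status(dataset="Sales Model", workspace="My Workspace")

# Sets max read-only replicas to 0, disabling query scale-out.
labs.disable_qso(dataset="Sales Model", workspace="My Workspace")
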