semantic-link-labs 0.11.2__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (93)
  1. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/METADATA +7 -6
  2. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/RECORD +90 -84
  3. sempy_labs/__init__.py +18 -18
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_authentication.py +81 -32
  6. sempy_labs/_capacities.py +2 -2
  7. sempy_labs/_capacity_migration.py +4 -4
  8. sempy_labs/_clear_cache.py +1 -1
  9. sempy_labs/_connections.py +107 -70
  10. sempy_labs/_dashboards.py +6 -2
  11. sempy_labs/_data_pipelines.py +1 -1
  12. sempy_labs/_dataflows.py +1 -1
  13. sempy_labs/_dax.py +3 -3
  14. sempy_labs/_delta_analyzer.py +4 -4
  15. sempy_labs/_delta_analyzer_history.py +1 -1
  16. sempy_labs/_deployment_pipelines.py +1 -1
  17. sempy_labs/_environments.py +1 -1
  18. sempy_labs/_eventhouses.py +9 -3
  19. sempy_labs/_eventstreams.py +1 -1
  20. sempy_labs/_external_data_shares.py +56 -2
  21. sempy_labs/_gateways.py +14 -7
  22. sempy_labs/_generate_semantic_model.py +7 -12
  23. sempy_labs/_git.py +1 -1
  24. sempy_labs/_graphQL.py +1 -1
  25. sempy_labs/_helper_functions.py +293 -22
  26. sempy_labs/_job_scheduler.py +12 -1
  27. sempy_labs/_kql_databases.py +1 -1
  28. sempy_labs/_kql_querysets.py +10 -2
  29. sempy_labs/_kusto.py +2 -2
  30. sempy_labs/_labels.py +126 -0
  31. sempy_labs/_list_functions.py +2 -2
  32. sempy_labs/_managed_private_endpoints.py +1 -1
  33. sempy_labs/_mirrored_databases.py +40 -16
  34. sempy_labs/_mirrored_warehouses.py +1 -1
  35. sempy_labs/_ml_experiments.py +1 -1
  36. sempy_labs/_model_bpa.py +6 -6
  37. sempy_labs/_model_bpa_bulk.py +3 -3
  38. sempy_labs/_model_dependencies.py +1 -1
  39. sempy_labs/_mounted_data_factories.py +3 -3
  40. sempy_labs/_notebooks.py +153 -3
  41. sempy_labs/_query_scale_out.py +2 -2
  42. sempy_labs/_refresh_semantic_model.py +1 -1
  43. sempy_labs/_semantic_models.py +15 -3
  44. sempy_labs/_spark.py +1 -1
  45. sempy_labs/_sql.py +3 -3
  46. sempy_labs/_sql_endpoints.py +5 -3
  47. sempy_labs/_sqldatabase.py +5 -1
  48. sempy_labs/_tags.py +3 -1
  49. sempy_labs/_translations.py +7 -360
  50. sempy_labs/_user_delegation_key.py +2 -2
  51. sempy_labs/_utils.py +27 -0
  52. sempy_labs/_vertipaq.py +3 -3
  53. sempy_labs/_vpax.py +1 -1
  54. sempy_labs/_warehouses.py +5 -0
  55. sempy_labs/_workloads.py +1 -1
  56. sempy_labs/_workspace_identity.py +1 -1
  57. sempy_labs/_workspaces.py +145 -11
  58. sempy_labs/admin/__init__.py +6 -0
  59. sempy_labs/admin/_capacities.py +34 -11
  60. sempy_labs/admin/_items.py +2 -2
  61. sempy_labs/admin/_tenant_keys.py +89 -0
  62. sempy_labs/directlake/_dl_helper.py +5 -2
  63. sempy_labs/graph/_users.py +3 -5
  64. sempy_labs/lakehouse/__init__.py +4 -0
  65. sempy_labs/lakehouse/_helper.py +18 -9
  66. sempy_labs/lakehouse/_lakehouse.py +18 -9
  67. sempy_labs/lakehouse/_materialized_lake_views.py +76 -0
  68. sempy_labs/lakehouse/_shortcuts.py +8 -2
  69. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +38 -47
  70. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +12 -22
  71. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +7 -11
  72. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -23
  73. sempy_labs/ml_model/__init__.py +23 -0
  74. sempy_labs/ml_model/_functions.py +427 -0
  75. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  76. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  77. sempy_labs/report/_download_report.py +4 -1
  78. sempy_labs/report/_export_report.py +12 -5
  79. sempy_labs/report/_generate_report.py +11 -3
  80. sempy_labs/report/_paginated.py +21 -15
  81. sempy_labs/report/_report_functions.py +19 -11
  82. sempy_labs/report/_report_rebind.py +21 -10
  83. sempy_labs/report/_reportwrapper.py +1 -1
  84. sempy_labs/theme/_org_themes.py +5 -6
  85. sempy_labs/tom/_model.py +13 -19
  86. sempy_labs/variable_library/__init__.py +19 -0
  87. sempy_labs/variable_library/_functions.py +403 -0
  88. sempy_labs/_dax_query_view.py +0 -57
  89. sempy_labs/_ml_models.py +0 -111
  90. sempy_labs/_variable_libraries.py +0 -92
  91. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/WHEEL +0 -0
  92. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/licenses/LICENSE +0 -0
  93. {semantic_link_labs-0.11.2.dist-info → semantic_link_labs-0.12.0.dist-info}/top_level.txt +0 -0
sempy_labs/_labels.py ADDED
@@ -0,0 +1,126 @@
+import sempy.fabric as fabric
+import requests
+import pandas as pd
+from typing import Optional, Union
+from uuid import UUID
+from sempy.fabric.exceptions import FabricHTTPException
+from sempy._utils._log import log
+
+
+@log
+def list_item_labels(workspace: Optional[Union[str, UUID]] = None) -> pd.DataFrame:
+    """
+    List all items within a workspace and shows their sensitivity labels.
+
+    NOTE: This function uses an internal API and is subject to change/break without notice.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of all items within a workspace and their sensitivity labels.
+    """
+
+    import notebookutils
+
+    token = notebookutils.credentials.getToken("pbi")
+    headers = {"Authorization": f"Bearer {token}"}
+
+    # Item types handled in special payload fields
+    grouped_types = {
+        "dashboards": "Dashboard",
+        "reports": "Report",
+        "models": "SemanticModel",
+        "dataflows": "Dataflow",
+        "datamarts": "Datamart",
+    }
+
+    # All other item types go into 'artifacts'
+    fabric_items = [
+        "Datamart",
+        "Lakehouse",
+        "Eventhouse",
+        "Environment",
+        "KQLDatabase",
+        "KQLQueryset",
+        "KQLDashboard",
+        "DataPipeline",
+        "Notebook",
+        "SparkJobDefinition",
+        "MLExperiment",
+        "MLModel",
+        "Warehouse",
+        "Eventstream",
+        "SQLEndpoint",
+        "MirroredWarehouse",
+        "MirroredDatabase",
+        "Reflex",
+        "GraphQLApi",
+        "MountedDataFactory",
+        "SQLDatabase",
+        "CopyJob",
+        "VariableLibrary",
+        "Dataflow",
+        "ApacheAirflowJob",
+        "WarehouseSnapshot",
+        "DigitalTwinBuilder",
+        "DigitalTwinBuilderFlow",
+        "MirroredAzureDatabricksCatalog",
+        "DataAgent",
+        "UserDataFunction",
+    ]
+
+    dfI = fabric.list_items(workspace=workspace)
+
+    payload = {
+        key: [{"artifactId": i} for i in dfI[dfI["Type"] == value]["Id"].tolist()]
+        for key, value in grouped_types.items()
+    }
+
+    # Add generic artifact types
+    artifact_ids = dfI[dfI["Type"].isin(fabric_items)]["Id"].tolist()
+    if artifact_ids:
+        payload["artifacts"] = [{"artifactId": i} for i in artifact_ids]
+
+    client = fabric.PowerBIRestClient()
+    response = client.get("/v1.0/myorg/capacities")
+    if response.status_code != 200:
+        raise FabricHTTPException("Failed to retrieve URL prefix.")
+    context = response.json().get("@odata.context")
+    prefix = context.split("/v1.0")[0]
+
+    response = requests.post(
+        f"{prefix}/metadata/informationProtection/artifacts",
+        json=payload,
+        headers=headers,
+    )
+    if response.status_code != 200:
+        raise FabricHTTPException(f"Failed to retrieve labels: {response.text}")
+    result = response.json()
+
+    label_keys = [
+        "artifactInformationProtections",
+        "datasetInformationProtections",
+        "reportInformationProtections",
+        "dashboardInformationProtections",
+    ]
+
+    rows = [
+        {
+            "Id": item.get("artifactObjectId"),
+            "Label Id": item.get("labelId"),
+            "Label Name": item.get("name"),
+            "Parent Label Name": item.get("parent", {}).get("name"),
+            "Label Description": item.get("tooltip"),
+        }
+        for key in label_keys
+        for item in result.get(key, [])
+    ]
+
+    df_labels = pd.DataFrame(rows)
+    return dfI.merge(df_labels, on="Id", how="left")
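
A minimal usage sketch of the new function, assuming it is re-exported at the package level like the module's other list functions (otherwise import it from sempy_labs._labels). The workspace name below is hypothetical, and the call must run inside a Fabric notebook since the token comes from notebookutils:

    import sempy_labs as labs

    # Hypothetical workspace name; requires a Fabric notebook session because
    # list_item_labels fetches its token via notebookutils.
    df = labs.list_item_labels(workspace="Sales Analytics")

    # The final left merge keeps every item, so items without a sensitivity
    # label surface with NaN in the label columns.
    print(df[df["Label Name"].isna()])
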
sempy_labs/_list_functions.py CHANGED
@@ -1,5 +1,5 @@
 import sempy.fabric as fabric
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     create_relationship_name,
     format_dax_object_name,
@@ -1131,7 +1131,7 @@ def list_reports_using_semantic_model(
     dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
-    Shows a list of all the reports (in all workspaces) which use a given semantic model.
+    Shows a list of all the reports which use a given semantic model. This is limited to the reports which are in the same workspace as the semantic model.

     Parameters
     ----------
sempy_labs/_managed_private_endpoints.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _is_valid_uuid,
     _base_api,
sempy_labs/_mirrored_databases.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _update_dataframe_datatypes,
     _base_api,
@@ -8,7 +8,7 @@ from ._helper_functions import (
     _create_dataframe,
     delete_item,
     create_item,
-    get_item_definition,
+    _get_item_definition,
     resolve_workspace_id,
 )
 import sempy_labs._icons as icons
@@ -91,6 +91,8 @@ def create_mirrored_database(

     This is a wrapper function for the following API: `Items - Create Mirrored Database <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/create-mirrored-database>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -117,6 +119,8 @@ def delete_mirrored_database(

     This is a wrapper function for the following API: `Items - Delete Mirrored Database <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/delete-mirrored-database>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database: str
@@ -139,6 +143,8 @@ def get_mirroring_status(

     This is a wrapper function for the following API: `Mirroring - Get Mirroring Status <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/get-mirroring-status>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database: str | uuid.UUID
@@ -154,13 +160,14 @@ def get_mirroring_status(
         The status of a mirrored database.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace_id = resolve_workspace_id(workspace)
     item_id = resolve_item_id(
         item=mirrored_database, type="MirroredDatabase", workspace=workspace
     )
     response = _base_api(
         request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/getMirroringStatus",
         status_codes=200,
+        client="fabric_sp",
     )

     return response.json().get("status", {})
@@ -175,6 +182,8 @@ def get_tables_mirroring_status(

     This is a wrapper function for the following API: `Mirroring - Get Tables Mirroring Status <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/get-tables-mirroring-status>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database: str | uuid.UUID
@@ -190,7 +199,7 @@
         A pandas dataframe showing the mirroring status of the tables.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace_id = resolve_workspace_id(workspace)
     item_id = resolve_item_id(
         item=mirrored_database, type="MirroredDatabase", workspace=workspace
     )
@@ -199,6 +208,7 @@
         method="post",
         status_codes=200,
         uses_pagination=True,
+        client="fabric_sp",
     )

     columns = {
@@ -211,21 +221,24 @@
     }
     df = _create_dataframe(columns=columns)

+    rows = []
     for r in responses:
         for v in r.get("data", []):
             m = v.get("metrics", {})
-            new_data = {
-                "Source Schema Name": v.get("sourceSchemaName"),
-                "Source Table Name": v.get("sourceTableName"),
-                "Status": v.get("status"),
-                "Processed Bytes": m.get("processedBytes"),
-                "Processed Rows": m.get("processedRows"),
-                "Last Sync Date": m.get("lastSyncDateTime"),
-            }
-
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+            rows.append(
+                {
+                    "Source Schema Name": v.get("sourceSchemaName"),
+                    "Source Table Name": v.get("sourceTableName"),
+                    "Status": v.get("status"),
+                    "Processed Bytes": m.get("processedBytes"),
+                    "Processed Rows": m.get("processedRows"),
+                    "Last Sync Date": m.get("lastSyncDateTime"),
+                }
+            )

-    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

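The rewrite of get_tables_mirroring_status above replaces per-row pd.concat with a rows list that is materialized into a DataFrame once. A standalone sketch of the pattern, with made-up data:

    import pandas as pd

    # Appending dicts and building the frame once is linear in the row count;
    # pd.concat inside the loop re-copies the accumulated frame on every row.
    rows = []
    for i in range(3):
        rows.append({"Source Table Name": f"table_{i}", "Processed Rows": i * 100})

    df = pd.DataFrame(rows, columns=["Source Table Name", "Processed Rows"])
    print(df)
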
@@ -239,6 +252,8 @@ def start_mirroring(

     This is a wrapper function for the following API: `Mirroring - Start Mirroring <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/start-mirroring>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database: str | uuid.UUID
@@ -257,6 +272,7 @@
         request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/startMirroring",
         method="post",
         status_codes=200,
+        client="fabric_sp",
     )

     print(
@@ -273,6 +289,8 @@ def stop_mirroring(

     This is a wrapper function for the following API: `Mirroring - Stop Mirroring <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/stop-mirroring>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database: str | uuid.UUID
@@ -291,6 +309,7 @@
         request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/stopMirroring",
         method="post",
         status_codes=200,
+        client="fabric_sp",
     )

     print(
@@ -309,6 +328,8 @@ def get_mirrored_database_definition(

     This is a wrapper function for the following API: `Items - Get Mirrored Database Definition <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/get-mirrored-database-definition>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database : str | uuid.UUID
@@ -327,7 +348,7 @@
         The mirrored database definition.
     """

-    return get_item_definition(
+    return _get_item_definition(
         item=mirrored_database,
         type="MirroredDatabase",
         workspace=workspace,
@@ -345,6 +366,8 @@ def update_mirrored_database_definition(
     """
     Updates an existing notebook with a new definition.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database : str | uuid.UUID
@@ -383,6 +406,7 @@
         json=request_body,
         status_codes=None,
         lro_return_status_code=True,
+        client="fabric_sp",
     )

     print(
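
These hunks both advertise service principal support in the docstrings and route the underlying requests through the fabric_sp client. A sketch of how a caller might drive one of these APIs under a service principal, using the library's service_principal_authentication context manager; the Key Vault URI and secret names are placeholders, and the linked Service Principal notebook is the authoritative reference:

    import sempy_labs as labs

    # Placeholder Key Vault URI and secret names.
    with labs.service_principal_authentication(
        key_vault_uri="https://contoso.vault.azure.net/",
        key_vault_tenant_id="tenant-id-secret-name",
        key_vault_client_id="client-id-secret-name",
        key_vault_client_secret="client-secret-secret-name",
    ):
        labs.start_mirroring(mirrored_database="SalesDB", workspace="Sales")
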
sempy_labs/_mirrored_warehouses.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
sempy_labs/_ml_experiments.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_id,
     _base_api,
     delete_item,
sempy_labs/_model_bpa.py CHANGED
@@ -3,8 +3,8 @@ import pandas as pd
 import warnings
 import datetime
 from IPython.display import display, HTML
-from ._model_dependencies import get_model_calc_dependencies
-from ._helper_functions import (
+from sempy_labs._model_dependencies import get_model_calc_dependencies
+from sempy_labs._helper_functions import (
     format_dax_object_name,
     create_relationship_name,
     save_as_delta_table,
@@ -15,9 +15,9 @@ from ._helper_functions import (
     resolve_workspace_name_and_id,
     _create_spark_session,
 )
-from .lakehouse import get_lakehouse_tables, lakehouse_attached
-from .tom import connect_semantic_model
-from ._model_bpa_rules import model_bpa_rules
+from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
+from sempy_labs.tom import connect_semantic_model
+from sempy_labs._model_bpa_rules import model_bpa_rules
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
@@ -391,7 +391,7 @@ def run_model_bpa(
     lakeT = get_lakehouse_tables()
     lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]

-    dfExport["Severity"].replace(icons.severity_mapping, inplace=True)
+    dfExport["Severity"] = dfExport["Severity"].replace(icons.severity_mapping)

     if len(lakeT_filt) == 0:
         runId = 1
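
The one-line change in run_model_bpa drops a chained inplace .replace, a pattern pandas deprecates under copy-on-write because the column selection may be a temporary copy. A minimal illustration, with a made-up mapping:

    import pandas as pd

    severity_mapping = {"warning": "Warning"}  # made-up mapping for illustration
    df = pd.DataFrame({"Severity": ["warning", "error"]})

    # Assigning the result back always updates df; an inplace replace on the
    # selection may silently modify a copy under copy-on-write.
    df["Severity"] = df["Severity"].replace(severity_mapping)
    print(df)
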
sempy_labs/_model_bpa_bulk.py CHANGED
@@ -1,7 +1,7 @@
 import sempy.fabric as fabric
 import pandas as pd
 import datetime
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     save_as_delta_table,
     resolve_workspace_capacity,
     retry,
@@ -9,11 +9,11 @@ from ._helper_functions import (
     resolve_workspace_id,
     resolve_lakehouse_name_and_id,
 )
-from .lakehouse import (
+from sempy_labs.lakehouse import (
     get_lakehouse_tables,
     lakehouse_attached,
 )
-from ._model_bpa import run_model_bpa
+from sempy_labs._model_bpa import run_model_bpa
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
sempy_labs/_model_dependencies.py CHANGED
@@ -1,6 +1,6 @@
 import sempy.fabric as fabric
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     format_dax_object_name,
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,
sempy_labs/_mounted_data_factories.py CHANGED
@@ -1,11 +1,11 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
-    get_item_definition,
+    _get_item_definition,
     delete_item,
 )

@@ -91,7 +91,7 @@ def get_mounted_data_factory_definition(
         The 'mountedDataFactory-content.json' file from the mounted data factory definition.
     """

-    return get_item_definition(
+    return _get_item_definition(
         item=mounted_data_factory,
         type="MountedDataFactory",
         workspace=workspace,
sempy_labs/_notebooks.py CHANGED
@@ -1,21 +1,23 @@
 import sempy.fabric as fabric
 import pandas as pd
 import sempy_labs._icons as icons
-from typing import Optional
+from typing import Optional, List
 import base64
 import requests
 from sempy._utils._log import log
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_workspace_id,
     _decode_b64,
     _base_api,
     resolve_item_id,
     create_item,
+    _create_dataframe,
 )
 from sempy.fabric.exceptions import FabricHTTPException
-import os
+from os import PathLike
 from uuid import UUID
+import os

 _notebook_prefix = "notebook-content."

@@ -114,6 +116,7 @@
     description: Optional[str] = None,
     workspace: Optional[str | UUID] = None,
     overwrite: bool = False,
+    folder: Optional[str | PathLike] = None,
 ):
     """
     Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web.
@@ -136,6 +139,9 @@
        or if no lakehouse attached, resolves to the workspace of the notebook.
     overwrite : bool, default=False
         If set to True, overwrites the existing notebook in the workspace if it exists.
+    folder : str | os.PathLike, default=None
+        The folder within the workspace where the notebook will be created.
+        Defaults to None which places the notebook in the root of the workspace.
     """

     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -161,6 +167,7 @@
             workspace=workspace_id,
             description=description,
             format="ipynb",
+            folder=folder,
         )
     elif len(dfI_filt) > 0 and overwrite:
         print(f"{icons.info} Overwrite of notebooks is currently not supported.")
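
import_notebook_from_web gains a folder parameter that is threaded through to create_item (the same parameter is added to create_notebook below). A usage sketch, assuming the notebook_name and url leading parameters from earlier releases; the URL and folder are hypothetical:

    import sempy_labs as labs

    # folder=None (the default) keeps the old behavior of creating the
    # notebook in the workspace root.
    labs.import_notebook_from_web(
        notebook_name="Sales Exploration",
        url="https://raw.githubusercontent.com/contoso/notebooks/main/sales.ipynb",
        folder="Imported",
    )
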
@@ -181,6 +188,7 @@
     description: Optional[str] = None,
     workspace: Optional[str | UUID] = None,
     format: Optional[str] = None,
+    folder: Optional[str | PathLike] = None,
 ):
     """
     Creates a new notebook with a definition within a workspace.
@@ -203,6 +211,9 @@
     format : str, default=None
         If 'ipynb' is provided than notebook_content should be standard ipynb format
         otherwise notebook_content should be GIT friendly format
+    folder : str | os.PathLike, default=None
+        The folder within the workspace where the notebook will be created.
+        Defaults to None which places the notebook in the root of the workspace.
     """

     notebook_payload = base64.b64encode(notebook_content).decode("utf-8")
@@ -226,6 +237,7 @@
         workspace=workspace,
         description=description,
         definition=definition_payload,
+        folder=folder,
     )


@@ -287,3 +299,141 @@ def update_notebook_definition(
     print(
         f"{icons.green_dot} The '{name}' notebook was updated within the '{workspace_name}' workspace."
     )
+
+
+@log
+def list_notebooks(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows the notebooks within a workspace.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the notebooks within a workspace.
+    """
+
+    columns = {
+        "Notebook Id": "string",
+        "Notebook Name": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    workspace_id = resolve_workspace_id(workspace)
+
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/notebooks", uses_pagination=True
+    )
+
+    rows = []
+    for r in responses:
+        for v in r.get("value", []):
+            rows.append(
+                {
+                    "Notebook Id": v.get("id"),
+                    "Notebook Name": v.get("displayName"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
+
+
+@log
+def search_notebooks(
+    search_string: str,
+    notebook: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID | List[str | UUID]] = None,
+) -> pd.DataFrame:
+    """
+    Searches notebooks within a workspace or across multiple workspaces for a given search string.
+
+    Parameters
+    ----------
+    search_string : str
+        The string to search for within the notebook definitions.
+    notebook : str | uuid.UUID, default=None
+        The name or ID of a specific notebook to search within.
+        Defaults to None which searches across all notebooks in the specified workspace(s).
+    workspace : str | uuid.UUID | list, default=None
+        The name or ID of the workspace or a list of workspaces to search within.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+        If a list is provided, it should contain workspace names or IDs.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the notebooks that contain the search string in their definitions.
+        The dataframe includes the workspace name, workspace ID, notebook name, and notebook ID.
+    """
+
+    if not workspace:
+        workspace_id = resolve_workspace_id(workspace)
+        workspace_ids = [workspace_id]
+    elif isinstance(workspace, str):
+        workspace_id = resolve_workspace_id(workspace)
+        workspace_ids = [workspace_id]
+    elif isinstance(workspace, list):
+        workspace_ids = [resolve_workspace_id(ws) for ws in workspace]
+    else:
+        raise ValueError(
+            "Workspace must be a string, UUID, or a list of strings/UUIDs."
+        )
+
+    dfW = fabric.list_workspaces()
+    dfW_filt = dfW[dfW["Id"].isin(workspace_ids)]
+
+    columns = {
+        "Workspace Name": "string",
+        "Workspace Id": "string",
+        "Notebook Name": "string",
+        "Notebook Id": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    rows = []
+    for _, r in dfW_filt.iterrows():
+        w_id = r["Id"]
+        w_name = r["Name"]
+        dfN = list_notebooks(workspace=w_id)
+        if notebook is not None:
+            item_id = resolve_item_id(item=notebook, type="Notebook", workspace=w_id)
+            dfN = dfN[dfN["Notebook Id"] == item_id]
+        for _, n in dfN.iterrows():
+            notebook_id = n["Notebook Id"]
+            notebook_name = n["Notebook Name"]
+            definition = _base_api(
+                request=f"v1/workspaces/{w_id}/notebooks/{notebook_id}/getDefinition",
+                method="post",
+                client="fabric_sp",
+                status_codes=None,
+                lro_return_json=True,
+            )
+            for part in definition.get("definition").get("parts"):
+                payload = _decode_b64(part["payload"])
+                if part["path"] == "notebook-content.py":
+                    if search_string in payload:
+                        rows.append(
+                            {
+                                "Workspace Name": w_name,
+                                "Workspace Id": w_id,
+                                "Notebook Name": notebook_name,
+                                "Notebook Id": notebook_id,
+                            }
+                        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
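
A short sketch of the two new helpers, assuming package-level re-exports as with the module's other functions; workspace names are hypothetical. Note that search_notebooks only inspects definition parts whose path is notebook-content.py, so it matches the GIT-friendly source format rather than raw ipynb:

    import sempy_labs as labs

    # List notebooks in a single workspace.
    df = labs.list_notebooks(workspace="Sales Analytics")

    # Search notebook source across two workspaces for a string.
    hits = labs.search_notebooks(
        search_string="spark.sql",
        workspace=["Sales Analytics", "Finance"],
    )
    print(hits[["Workspace Name", "Notebook Name"]])
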
sempy_labs/_query_scale_out.py CHANGED
@@ -1,6 +1,6 @@
 import sempy.fabric as fabric
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     _update_dataframe_datatypes,
@@ -406,7 +406,7 @@ def set_workspace_default_storage_format(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

     # Check current storage format
-    dfW = fabric.list_workspaces(filter=f"name eq '{workspace_name}'")
+    dfW = fabric.list_workspaces(filter=f"id eq '{workspace_id}'")
     if len(dfW) == 0:
         raise ValueError()
     current_storage_format = dfW["Default Dataset Storage Format"].iloc[0]
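
Filtering the workspace list by id rather than display name makes the lookup robust to names that break the f-string OData filter (an apostrophe, for example) and to concurrent renames. For instance:

    import sempy.fabric as fabric

    # A name like "Contoso's Workspace" would break the quoted name filter;
    # the id-based filter avoids quoting issues (the id below is hypothetical).
    workspace_id = "11111111-2222-3333-4444-555555555555"
    dfW = fabric.list_workspaces(filter=f"id eq '{workspace_id}'")
    print(dfW)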