semantic-link-labs 0.8.9__py3-none-any.whl → 0.8.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of semantic-link-labs has been flagged as potentially problematic.

@@ -7,23 +7,25 @@ from sempy_labs._helper_functions import (
     pagination,
     resolve_item_type,
     format_dax_object_name,
+    resolve_dataset_name_and_id,
 )
 import pandas as pd
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
 def get_object_level_security(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str] = None
 ) -> pd.DataFrame:
     """
     Shows the object level security for the semantic model.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | UUID
+        Name or ID of the semantic model.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -37,12 +39,13 @@ def get_object_level_security(
 
     from sempy_labs.tom import connect_semantic_model
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     df = pd.DataFrame(columns=["Role Name", "Object Type", "Table Name", "Object Name"])
 
     with connect_semantic_model(
-        dataset=dataset, readonly=True, workspace=workspace
+        dataset=dataset_id, readonly=True, workspace=workspace_id
     ) as tom:
 
         for r in tom.model.Roles:
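
For reference, the practical effect of this change is that callers can pass either the model's name or its UUID. A minimal usage sketch, assuming the function is re-exported at the package root as in prior releases (the workspace name and UUID below are placeholders):

    from uuid import UUID
    import sempy_labs as labs

    # Both calls target the same model; name and UUID are interchangeable
    # in 0.8.10. The identifiers shown are illustrative only.
    ols = labs.get_object_level_security(dataset="Sales Model", workspace="Analytics")
    ols_by_id = labs.get_object_level_security(
        dataset=UUID("00000000-0000-0000-0000-000000000000")
    )
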
@@ -82,15 +85,15 @@ def get_object_level_security(
 
 
 def list_tables(
-    dataset: str, workspace: Optional[str] = None, extended: bool = False
+    dataset: str | UUID, workspace: Optional[str] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a semantic model's tables and their properties.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | UUID
+        Name or ID of the semantic model.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -106,7 +109,8 @@ def list_tables(
 
     from sempy_labs.tom import connect_semantic_model
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     df = pd.DataFrame(
         columns=[
@@ -121,20 +125,20 @@ def list_tables(
     )
 
     with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=True
+        dataset=dataset_id, workspace=workspace_id, readonly=True
     ) as tom:
         if extended:
             dict_df = fabric.evaluate_dax(
-                dataset=dataset,
-                workspace=workspace,
+                dataset=dataset_id,
+                workspace=workspace_id,
                 dax_string="""
                 EVALUATE SELECTCOLUMNS(FILTER(INFO.STORAGETABLECOLUMNS(), [COLUMN_TYPE] = "BASIC_DATA"),[DIMENSION_NAME],[DICTIONARY_SIZE])
                 """,
             )
             dict_sum = dict_df.groupby("[DIMENSION_NAME]")["[DICTIONARY_SIZE]"].sum()
             data = fabric.evaluate_dax(
-                dataset=dataset,
-                workspace=workspace,
+                dataset=dataset_id,
+                workspace=workspace_id,
                 dax_string="""EVALUATE SELECTCOLUMNS(INFO.STORAGETABLECOLUMNSEGMENTS(),[TABLE_ID],[DIMENSION_NAME],[USED_SIZE])""",
             )
             data_sum = (
@@ -162,8 +166,8 @@ def list_tables(
                 .sum()
             )
             rc = fabric.evaluate_dax(
-                dataset=dataset,
-                workspace=workspace,
+                dataset=dataset_id,
+                workspace=workspace_id,
                 dax_string="""
                 SELECT [DIMENSION_NAME],[ROWS_COUNT] FROM $SYSTEM.DISCOVER_STORAGE_TABLES
                 WHERE RIGHT ( LEFT ( TABLE_ID, 2 ), 1 ) <> '$'
@@ -616,11 +620,7 @@ def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:
         ]
     )
 
-    if workspace == "None":
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resovle_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.PowerBIRestClient()
     response = client.get(f"/v1.0/myorg/groups/{workspace_id}/dashboards")
@@ -635,8 +635,8 @@ def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:
             "Web URL": v.get("webUrl"),
             "Embed URL": v.get("embedUrl"),
             "Data Classification": v.get("dataClassification"),
-            "Users": [v.get("users")],
-            "Subscriptions": [v.get("subscriptions")],
+            "Users": v.get("users"),
+            "Subscriptions": v.get("subscriptions"),
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
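In isolation, the change above affects the shape of the "Users" and "Subscriptions" cells: the old code wrapped each REST response value in an extra list. A standalone sketch of the difference (the sample payload is made up):

    # Invented REST payload fragment, for illustration only.
    v = {"users": ["alice@contoso.com", "bob@contoso.com"]}

    old_cell = [v.get("users")]  # [['alice@...', 'bob@...']] -- nested list
    new_cell = v.get("users")    # ['alice@...', 'bob@...']   -- raw value
    print(old_cell, new_cell)
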
@@ -854,15 +854,15 @@ def update_item(
 
 
 def list_relationships(
-    dataset: str, workspace: Optional[str] = None, extended: bool = False
+    dataset: str | UUID, workspace: Optional[str] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a semantic model's relationships and their properties.
 
     Parameters
     ----------
-    dataset: str
-        Name of the semantic model.
+    dataset: str | UUID
+        Name or UUID of the semantic model.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -876,17 +876,18 @@ def list_relationships(
         A pandas dataframe showing the object level security for the semantic model.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
-    dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
+    dfR = fabric.list_relationships(dataset=dataset_id, workspace=workspace_id)
     dfR["From Object"] = format_dax_object_name(dfR["From Table"], dfR["From Column"])
     dfR["To Object"] = format_dax_object_name(dfR["To Table"], dfR["To Column"])
 
     if extended:
         # Used to map the Relationship IDs
         rel = fabric.evaluate_dax(
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
             dax_string="""
             SELECT
             [ID] AS [RelationshipID]
@@ -897,8 +898,8 @@ def list_relationships(
 
         # USED_SIZE shows the Relationship Size where TABLE_ID starts with R$
         cs = fabric.evaluate_dax(
-            dataset=dataset,
-            workspace=workspace,
+            dataset=dataset_id,
+            workspace=workspace_id,
             dax_string="""
             SELECT
             [TABLE_ID]
@@ -1574,3 +1575,148 @@ def list_semantic_model_object_report_usage(
     final_df.reset_index(drop=True, inplace=True)
 
     return final_df
+
+
+def list_server_properties(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Lists the `properties <https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.serverproperty?view=analysisservices-dotnet>`_ of the Analysis Services instance.
+
+    Parameters
+    ----------
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of the server properties.
+    """
+
+    tom_server = fabric.create_tom_server(readonly=True, workspace=workspace)
+
+    rows = [
+        {
+            "Name": sp.Name,
+            "Value": sp.Value,
+            "Default Value": sp.DefaultValue,
+            "Is Read Only": sp.IsReadOnly,
+            "Requires Restart": sp.RequiresRestart,
+            "Units": sp.Units,
+            "Category": sp.Category,
+        }
+        for sp in tom_server.ServerProperties
+    ]
+
+    tom_server.Dispose()
+    df = pd.DataFrame(rows)
+
+    bool_cols = ["Is Read Only", "Requires Restart"]
+    df[bool_cols] = df[bool_cols].astype(bool)
+
+    return df
+
+
+def list_semantic_model_errors(
+    dataset: str | UUID, workspace: Optional[str | UUID]
+) -> pd.DataFrame:
+    """
+    Shows a list of a semantic model's errors and their error messages (if they exist).
+
+    Parameters
+    ----------
+    dataset : str | UUID
+        Name or ID of the semantic model.
+    workspace : str | UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of the errors and error messages for a given semantic model.
+    """
+
+    from sempy_labs.tom import connect_semantic_model
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+        dataset, workspace=workspace_id
+    )
+
+    error_rows = []
+
+    with connect_semantic_model(
+        dataset=dataset_id, workspace=workspace_id, readonly=True
+    ) as tom:
+        # Define mappings of TOM objects to object types and attributes
+        error_checks = [
+            ("Column", tom.all_columns, lambda o: o.ErrorMessage),
+            ("Partition", tom.all_partitions, lambda o: o.ErrorMessage),
+            (
+                "Partition - Data Coverage Expression",
+                tom.all_partitions,
+                lambda o: (
+                    o.DataCoverageDefinition.ErrorMessage
+                    if o.DataCoverageDefinition
+                    else ""
+                ),
+            ),
+            ("Row Level Security", tom.all_rls, lambda o: o.ErrorMessage),
+            ("Calculation Item", tom.all_calculation_items, lambda o: o.ErrorMessage),
+            ("Measure", tom.all_measures, lambda o: o.ErrorMessage),
+            (
+                "Measure - Detail Rows Expression",
+                tom.all_measures,
+                lambda o: (
+                    o.DetailRowsDefinition.ErrorMessage
+                    if o.DetailRowsDefinition
+                    else ""
+                ),
+            ),
+            (
+                "Measure - Format String Expression",
+                tom.all_measures,
+                lambda o: (
+                    o.FormatStringDefinition.ErrorMessage
+                    if o.FormatStringDefinition
+                    else ""
+                ),
+            ),
+            (
+                "Calculation Group - Multiple or Empty Selection Expression",
+                tom.all_calculation_groups,
+                lambda o: (
+                    o.CalculationGroup.MultipleOrEmptySelectionExpression.ErrorMessage
+                    if o.CalculationGroup.MultipleOrEmptySelectionExpression
+                    else ""
+                ),
+            ),
+            (
+                "Calculation Group - No Selection Expression",
+                tom.all_calculation_groups,
+                lambda o: (
+                    o.CalculationGroup.NoSelectionExpression.ErrorMessage
+                    if o.CalculationGroup.NoSelectionExpression
+                    else ""
+                ),
+            ),
+        ]
+
+        # Iterate over all error checks
+        for object_type, getter, error_extractor in error_checks:
+            for obj in getter():
+                error_message = error_extractor(obj)
+                if error_message:  # Only add rows if there's an error message
+                    error_rows.append(
+                        {
+                            "Object Type": object_type,
+                            "Table Name": obj.Parent.Name,
+                            "Object Name": obj.Name,
+                            "Error Message": error_message,
+                        }
+                    )
+
+    return pd.DataFrame(error_rows)
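
The two functions above are new public API in 0.8.10. A usage sketch, assuming they are re-exported at the package root like the module's other helpers (workspace and model names are placeholders):

    import sempy_labs as labs

    # Server-level Analysis Services properties for the workspace's instance.
    props = labs.list_server_properties(workspace="Analytics")
    print(props[props["Requires Restart"]])

    # Error messages for broken columns, partitions, measures, RLS, etc.
    errors = labs.list_semantic_model_errors(dataset="Sales Model", workspace="Analytics")
    if errors.empty:
        print("No errors found.")
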
sempy_labs/_model_bpa.py CHANGED
@@ -10,9 +10,10 @@ from sempy_labs._helper_functions import (
     create_relationship_name,
     save_as_delta_table,
     resolve_workspace_capacity,
-    resolve_dataset_id,
+    resolve_dataset_name_and_id,
     get_language_codes,
     _get_max_run_id,
+    resolve_workspace_name_and_id,
 )
 from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
 from sempy_labs.tom import connect_semantic_model
@@ -23,11 +24,12 @@ import sempy_labs._icons as icons
 from pyspark.sql.functions import col, flatten
 from pyspark.sql.types import StructType, StructField, StringType
 import os
+from uuid import UUID
 
 
 @log
 def run_model_bpa(
-    dataset: str,
+    dataset: str | UUID,
     rules: Optional[pd.DataFrame] = None,
     workspace: Optional[str] = None,
     export: bool = False,
@@ -41,8 +43,8 @@ def run_model_bpa(
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | UUID
+        Name or ID of the semantic model.
     rules : pandas.DataFrame, default=None
         A pandas dataframe containing rules to be evaluated.
     workspace : str, default=None
@@ -105,28 +107,27 @@ def run_model_bpa(
     if language is not None:
         language = map_language(language, language_list)
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
+        dataset, workspace=workspace_id
+    )
 
     if language is not None and language not in language_list:
         print(
             f"{icons.yellow_dot} The '{language}' language code is not in our predefined language list. Please file an issue and let us know which language code you are using: https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=."
         )
 
-    if extended:
-        icons.sll_tags.append("ModelBPAExtended")
-        with connect_semantic_model(
-            dataset=dataset, workspace=workspace, readonly=False
-        ) as tom:
-            tom.set_vertipaq_annotations()
-
     with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=True
+        dataset=dataset_id, workspace=workspace_id, readonly=True
     ) as tom:
 
+        if extended:
+            tom.set_vertipaq_annotations()
+
         # Do not run BPA for models with no tables
         if tom.model.Tables.Count == 0:
             print(
-                f"{icons.warning} The '{dataset}' semantic model within the '{workspace}' workspace has no tables and therefore there are no valid BPA results."
+                f"{icons.warning} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has no tables and therefore there are no valid BPA results."
            )
             finalDF = pd.DataFrame(
                 columns=[
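
The pattern repeated throughout this release: resolve the workspace and dataset to a (name, id) pair once, then use the id for engine calls and the name for user-facing messages. A hedged sketch of the convention, with the helper behavior inferred from the call sites in this diff (placeholder names; these helpers live in a private module and may change):

    from sempy_labs._helper_functions import (
        resolve_dataset_name_and_id,
        resolve_workspace_name_and_id,
    )

    # Each resolver accepts a name, a UUID, or None and returns (name, id),
    # as inferred from the call sites above.
    (workspace_name, workspace_id) = resolve_workspace_name_and_id("Analytics")
    (dataset_name, dataset_id) = resolve_dataset_name_and_id(
        "Sales Model", workspace=workspace_id
    )
    print(f"'{dataset_name}' ({dataset_id}) in '{workspace_name}' ({workspace_id})")
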
@@ -140,7 +141,9 @@ def run_model_bpa(
             ]
         )
     else:
-        dep = get_model_calc_dependencies(dataset=dataset, workspace=workspace)
+        dep = get_model_calc_dependencies(
+            dataset=dataset_id, workspace=workspace_id
+        )
 
     def translate_using_po(rule_file):
         current_dir = os.path.dirname(os.path.abspath(__file__))
@@ -386,20 +389,19 @@ def run_model_bpa(
         runId = max_run_id + 1
 
     now = datetime.datetime.now()
-    dfD = fabric.list_datasets(workspace=workspace, mode="rest")
-    dfD_filt = dfD[dfD["Dataset Name"] == dataset]
+    dfD = fabric.list_datasets(workspace=workspace_id, mode="rest")
+    dfD_filt = dfD[dfD["Dataset Id"] == dataset_id]
     configured_by = dfD_filt["Configured By"].iloc[0]
-    capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace)
+    capacity_id, capacity_name = resolve_workspace_capacity(workspace=workspace_id)
     dfExport["Capacity Name"] = capacity_name
     dfExport["Capacity Id"] = capacity_id
-    dfExport["Workspace Name"] = workspace
-    dfExport["Workspace Id"] = fabric.resolve_workspace_id(workspace)
-    dfExport["Dataset Name"] = dataset
-    dfExport["Dataset Id"] = resolve_dataset_id(dataset, workspace)
+    dfExport["Workspace Name"] = workspace_name
+    dfExport["Workspace Id"] = workspace_id
+    dfExport["Dataset Name"] = dataset_name
+    dfExport["Dataset Id"] = dataset_id
     dfExport["Configured By"] = configured_by
     dfExport["Timestamp"] = now
     dfExport["RunId"] = runId
-    dfExport["Configured By"] = configured_by
     dfExport["RunId"] = dfExport["RunId"].astype("int")
 
     dfExport = dfExport[list(icons.bpa_schema.keys())]
@@ -119,16 +119,16 @@ def run_model_bpa_bulk(
     dfD_filt = dfD[~dfD["Dataset Name"].isin(skip_models)]
 
     if len(dfD_filt) > 0:
-        for i2, r2 in dfD_filt.iterrows():
+        for _, r2 in dfD_filt.iterrows():
+            dataset_id = r2["Dataset Id"]
             dataset_name = r2["Dataset Name"]
             config_by = r2["Configured By"]
-            dataset_id = r2["Dataset Id"]
             print(
                 f"{icons.in_progress} Collecting Model BPA stats for the '{dataset_name}' semantic model within the '{wksp}' workspace."
             )
             try:
                 bpa_df = run_model_bpa(
-                    dataset=dataset_name,
+                    dataset=dataset_id,
                     workspace=wksp,
                     language=language,
                     return_dataframe=True,
@@ -1,10 +1,15 @@
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs._helper_functions import format_dax_object_name
+from sempy_labs._helper_functions import (
+    format_dax_object_name,
+    resolve_dataset_name_and_id,
+    resolve_workspace_name_and_id,
+)
 import sempy_labs._icons as icons
 from typing import Any, Dict, Optional
 from anytree import Node, RenderTree
 from sempy._utils._log import log
+from uuid import UUID
 
 
 @log
@@ -139,15 +144,15 @@ def get_measure_dependencies(
 
 @log
 def get_model_calc_dependencies(
-    dataset: str, workspace: Optional[str] = None
+    dataset: str | UUID, workspace: Optional[str] = None
 ) -> pd.DataFrame:
     """
     Shows all dependencies for all objects in a semantic model.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | UUID
+        Name or ID of the semantic model.
     workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
@@ -159,10 +164,11 @@ def get_model_calc_dependencies(
         Shows all dependencies for all objects in the semantic model.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
    dep = fabric.evaluate_dax(
-        dataset=dataset,
-        workspace=workspace,
+        dataset=dataset_id,
+        workspace=workspace_id,
         dax_string="""
         SELECT
         [TABLE] AS [Table Name],
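
As elsewhere in this release, the dependency helper now accepts a model UUID as well as a name; a short sketch (placeholder UUID, and assuming the function is exported at the package root):

    import sempy_labs as labs
    from uuid import UUID

    # Placeholder UUID; the helper resolves it to the model's name and id.
    dep = labs.get_model_calc_dependencies(
        dataset=UUID("00000000-0000-0000-0000-000000000000"),
        workspace="Analytics",
    )
    print(dep.head())
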
sempy_labs/_notebooks.py CHANGED
@@ -10,6 +10,42 @@ from sempy_labs._helper_functions import (
     _decode_b64,
 )
 from sempy.fabric.exceptions import FabricHTTPException
+import os
+
+_notebook_prefix = "notebook-content."
+
+
+def _get_notebook_definition_base(
+    notebook_name: str, workspace: Optional[str] = None
+) -> pd.DataFrame:
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    item_id = fabric.resolve_item_id(
+        item_name=notebook_name, type="Notebook", workspace=workspace
+    )
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"v1/workspaces/{workspace_id}/notebooks/{item_id}/getDefinition",
+    )
+
+    result = lro(client, response).json()
+
+    return pd.json_normalize(result["definition"]["parts"])
+
+
+def _get_notebook_type(notebook_name: str, workspace: Optional[str] = None) -> str:
+
+    df_items = _get_notebook_definition_base(
+        notebook_name=notebook_name, workspace=workspace
+    )
+
+    file_path = df_items[df_items["path"].str.startswith(_notebook_prefix)][
+        "path"
+    ].iloc[0]
+
+    _, file_extension = os.path.splitext(file_path)
+
+    return file_extension[1:]
 
 
 def get_notebook_definition(
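
For context, the new _get_notebook_type helper infers the notebook format from the extension of the "notebook-content.*" definition part. The core of that derivation in isolation (runnable on its own):

    import os

    # Same logic as _get_notebook_type: split the extension off the
    # definition part's path and drop the leading dot.
    for path in ["notebook-content.py", "notebook-content.ipynb"]:
        _, ext = os.path.splitext(path)
        print(path, "->", ext[1:])  # -> py, ipynb
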
@@ -38,18 +74,10 @@ def get_notebook_definition(
         The notebook definition.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = fabric.resolve_item_id(
-        item_name=notebook_name, type="Notebook", workspace=workspace
+    df_items = _get_notebook_definition_base(
+        notebook_name=notebook_name, workspace=workspace
     )
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"v1/workspaces/{workspace_id}/notebooks/{item_id}/getDefinition",
-    )
-
-    result = lro(client, response).json()
-    df_items = pd.json_normalize(result["definition"]["parts"])
-    df_items_filt = df_items[df_items["path"] == "notebook-content.py"]
+    df_items_filt = df_items[df_items["path"].str.startswith(_notebook_prefix)]
     payload = df_items_filt["payload"].iloc[0]
 
     if decode:
@@ -115,9 +143,10 @@ def import_notebook_from_web(
             description=description,
         )
     elif len(dfI_filt) > 0 and overwrite:
-        update_notebook_definition(
-            name=notebook_name, notebook_content=response.content, workspace=workspace
-        )
+        print(f"{icons.info} Overwrite of notebooks is currently not supported.")
+        # update_notebook_definition(
+        #     name=notebook_name, notebook_content=response.content, workspace=workspace
+        # )
     else:
         raise ValueError(
             f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace}' workspace and 'overwrite' is set to False."
@@ -127,6 +156,7 @@ def import_notebook_from_web(
 def create_notebook(
     name: str,
     notebook_content: str,
+    type: str = "py",
     description: Optional[str] = None,
     workspace: Optional[str] = None,
 ):
@@ -139,6 +169,8 @@ def create_notebook(
         The name of the notebook to be created.
     notebook_content : str
         The Jupyter notebook content (not in Base64 format).
+    type : str, default="py"
+        The notebook type.
     description : str, default=None
         The description of the notebook.
         Defaults to None which does not place a description.
@@ -158,7 +190,7 @@ def create_notebook(
         "format": "ipynb",
         "parts": [
             {
-                "path": "notebook-content.py",
+                "path": f"{_notebook_prefix}.{type}",
                 "payload": notebook_payload,
                 "payloadType": "InlineBase64",
             }
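
A sketch of the new type parameter in use, assuming create_notebook is exported at the package root (the name, workspace, and content below are placeholders):

    import sempy_labs as labs

    # Minimal Jupyter JSON; real content would carry cells.
    content = '{"cells": [], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}'

    labs.create_notebook(
        name="My Notebook",
        notebook_content=content,  # plain JSON; the library Base64-encodes it
        type="ipynb",              # new in 0.8.10; defaults to "py"
        workspace="Analytics",
    )
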
@@ -202,13 +234,13 @@ def update_notebook_definition(
         item_name=name, type="Notebook", workspace=workspace
     )
 
+    type = _get_notebook_type(notebook_name=name, workspace=workspace_id)
+
     request_body = {
-        "displayName": name,
         "definition": {
-            "format": "ipynb",
             "parts": [
                 {
-                    "path": "notebook-content.py",
+                    "path": f"{_notebook_prefix}.{type}",
                     "payload": notebook_payload,
                     "payloadType": "InlineBase64",
                 }