semantic-link-labs 0.4.1-py3-none-any.whl → 0.4.2-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Files changed (52)
  1. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/METADATA +1 -1
  2. semantic_link_labs-0.4.2.dist-info/RECORD +53 -0
  3. sempy_labs/__init__.py +25 -25
  4. sempy_labs/_ai.py +28 -27
  5. sempy_labs/_clear_cache.py +2 -1
  6. sempy_labs/_dax.py +5 -9
  7. sempy_labs/_generate_semantic_model.py +7 -8
  8. sempy_labs/_helper_functions.py +17 -13
  9. sempy_labs/_icons.py +5 -0
  10. sempy_labs/_list_functions.py +273 -17
  11. sempy_labs/_model_auto_build.py +1 -1
  12. sempy_labs/_model_bpa.py +37 -37
  13. sempy_labs/_model_dependencies.py +11 -12
  14. sempy_labs/_one_lake_integration.py +15 -22
  15. sempy_labs/_query_scale_out.py +1 -1
  16. sempy_labs/_refresh_semantic_model.py +4 -4
  17. sempy_labs/_translations.py +5 -5
  18. sempy_labs/_vertipaq.py +11 -11
  19. sempy_labs/directlake/_directlake_schema_compare.py +11 -9
  20. sempy_labs/directlake/_directlake_schema_sync.py +36 -37
  21. sempy_labs/directlake/_fallback.py +3 -3
  22. sempy_labs/directlake/_get_directlake_lakehouse.py +3 -4
  23. sempy_labs/directlake/_get_shared_expression.py +3 -3
  24. sempy_labs/directlake/_guardrails.py +3 -3
  25. sempy_labs/directlake/_list_directlake_model_calc_tables.py +28 -25
  26. sempy_labs/directlake/_show_unsupported_directlake_objects.py +4 -4
  27. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -11
  28. sempy_labs/directlake/_update_directlake_partition_entity.py +25 -9
  29. sempy_labs/directlake/_warm_cache.py +5 -7
  30. sempy_labs/lakehouse/__init__.py +0 -2
  31. sempy_labs/lakehouse/_get_lakehouse_columns.py +3 -2
  32. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -7
  33. sempy_labs/lakehouse/_lakehouse.py +6 -5
  34. sempy_labs/lakehouse/_shortcuts.py +8 -106
  35. sempy_labs/migration/__init__.py +4 -2
  36. sempy_labs/migration/_create_pqt_file.py +2 -2
  37. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +7 -7
  38. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +4 -4
  39. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +5 -6
  40. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -6
  41. sempy_labs/migration/_migration_validation.py +1 -164
  42. sempy_labs/migration/_refresh_calc_tables.py +3 -5
  43. sempy_labs/report/__init__.py +2 -2
  44. sempy_labs/report/_generate_report.py +14 -15
  45. sempy_labs/report/_report_functions.py +11 -10
  46. sempy_labs/report/_report_rebind.py +6 -7
  47. sempy_labs/tom/__init__.py +6 -0
  48. sempy_labs/{_tom.py → tom/_model.py} +166 -187
  49. semantic_link_labs-0.4.1.dist-info/RECORD +0 -52
  50. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/LICENSE +0 -0
  51. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/WHEEL +0 -0
  52. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/top_level.txt +0 -0
sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py

```diff
@@ -6,7 +6,7 @@ from sempy_labs._list_functions import list_tables
 from sempy_labs.directlake._get_shared_expression import get_shared_expression
 from sempy_labs._helper_functions import resolve_lakehouse_name
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
-from sempy_labs._tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from typing import List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
```
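The only change here is the import path: the private `_tom` module is promoted to a public `tom` subpackage (file 48 above, `sempy_labs/{_tom.py → tom/_model.py}`). A minimal before/after sketch for downstream code:

```python
# 0.4.1 and earlier (private module; this path is removed in 0.4.2):
# from sempy_labs._tom import connect_semantic_model

# 0.4.2 onward (public subpackage):
from sempy_labs.tom import connect_semantic_model
```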
```diff
@@ -50,13 +50,13 @@ def migrate_tables_columns_to_semantic_model(
 
     workspace = fabric.resolve_workspace_name(workspace)
 
-    if new_dataset_workspace == None:
+    if new_dataset_workspace is None:
         new_dataset_workspace = workspace
 
-    if lakehouse_workspace == None:
+    if lakehouse_workspace is None:
         lakehouse_workspace = new_dataset_workspace
 
-    if lakehouse == None:
+    if lakehouse is None:
         lakehouse_id = fabric.get_lakehouse_id()
         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
 
```
```diff
@@ -158,8 +158,8 @@ def migrate_tables_columns_to_semantic_model(
             f"{icons.red_dot} Lakehouse not attached to notebook and lakehouse/lakehouse_workspace are not specified. Please add your lakehouse to this notebook or specify the lakehouse/lakehouse_workspace parameters."
         )
         print(
-            f"To attach a lakehouse to a notebook, go to the the 'Explorer' window to the left, click 'Lakehouses' to add your lakehouse to this notebook"
+            "To attach a lakehouse to a notebook, go to the the 'Explorer' window to the left, click 'Lakehouses' to add your lakehouse to this notebook"
        )
         print(
-            f"\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse"
+            "\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse"
        )
```
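Besides dropping the needless `f` prefixes on strings with no placeholders, the release systematically replaces `== None` with `is None` (in the hunk above and several below), per PEP 8: `None` is a singleton, and `is` checks identity, so it cannot be fooled by an object whose class overrides `__eq__`. A self-contained illustration:

```python
class AlwaysEqual:
    # Pathological class whose instances compare equal to anything.
    def __eq__(self, other):
        return True

x = AlwaysEqual()
print(x == None)  # True  -- __eq__ hijacks the equality comparison
print(x is None)  # False -- the identity check is immune
```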
sempy_labs/migration/_migration_validation.py

```diff
@@ -1,173 +1,10 @@
 import sempy
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs._helper_functions import create_relationship_name
-from sempy_labs._tom import connect_semantic_model
 from typing import List, Optional, Union
+from sempy_labs._list_functions import list_semantic_model_objects
 from sempy._utils._log import log
 
-
-def list_semantic_model_objects(dataset: str, workspace: Optional[str] = None):
-    """
-    Shows a list of semantic model objects.
-
-    Parameters
-    ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing a list of objects in the semantic model
-    """
-
-    df = pd.DataFrame(columns=["Parent Name", "Object Name", "Object Type"])
-    with connect_semantic_model(
-        dataset=dataset, workspace=workspace, readonly=True
-    ) as tom:
-        for t in tom.model.Tables:
-            if t.CalculationGroup is not None:
-                new_data = {
-                    "Parent Name": t.Parent.Name,
-                    "Object Name": t.Name,
-                    "Object Type": "Calculation Group",
-                }
-                df = pd.concat(
-                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                )
-                for ci in t.CalculationGroup.CalculationItems:
-                    new_data = {
-                        "Parent Name": t.Name,
-                        "Object Name": ci.Name,
-                        "Object Type": str(ci.ObjectType),
-                    }
-                    df = pd.concat(
-                        [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                    )
-            elif any(str(p.SourceType) == "Calculated" for p in t.Partitions):
-                new_data = {
-                    "Parent Name": t.Parent.Name,
-                    "Object Name": t.Name,
-                    "Object Type": "Calculated Table",
-                }
-                df = pd.concat(
-                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                )
-            else:
-                new_data = {
-                    "Parent Name": t.Parent.Name,
-                    "Object Name": t.Name,
-                    "Object Type": str(t.ObjectType),
-                }
-                df = pd.concat(
-                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                )
-            for c in t.Columns:
-                if str(c.Type) != "RowNumber":
-                    if str(c.Type) == "Calculated":
-                        new_data = {
-                            "Parent Name": c.Parent.Name,
-                            "Object Name": c.Name,
-                            "Object Type": "Calculated Column",
-                        }
-                        df = pd.concat(
-                            [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                        )
-                    else:
-                        new_data = {
-                            "Parent Name": c.Parent.Name,
-                            "Object Name": c.Name,
-                            "Object Type": str(c.ObjectType),
-                        }
-                        df = pd.concat(
-                            [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                        )
-            for m in t.Measures:
-                new_data = {
-                    "Parent Name": m.Parent.Name,
-                    "Object Name": m.Name,
-                    "Object Type": str(m.ObjectType),
-                }
-                df = pd.concat(
-                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                )
-            for h in t.Hierarchies:
-                new_data = {
-                    "Parent Name": h.Parent.Name,
-                    "Object Name": h.Name,
-                    "Object Type": str(h.ObjectType),
-                }
-                df = pd.concat(
-                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                )
-                for l in h.Levels:
-                    new_data = {
-                        "Parent Name": l.Parent.Name,
-                        "Object Name": l.Name,
-                        "Object Type": str(l.ObjectType),
-                    }
-                    df = pd.concat(
-                        [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                    )
-            for p in t.Partitions:
-                new_data = {
-                    "Parent Name": p.Parent.Name,
-                    "Object Name": p.Name,
-                    "Object Type": str(p.ObjectType),
-                }
-                df = pd.concat(
-                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                )
-        for r in tom.model.Relationships:
-            rName = create_relationship_name(
-                r.FromTable.Name, r.FromColumn.Name, r.ToTable.Name, r.ToColumn.Name
-            )
-            new_data = {
-                "Parent Name": r.Parent.Name,
-                "Object Name": rName,
-                "Object Type": str(r.ObjectType),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-        for role in tom.model.Roles:
-            new_data = {
-                "Parent Name": role.Parent.Name,
-                "Object Name": role.Name,
-                "Object Type": str(role.ObjectType),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-            for rls in role.TablePermissions:
-                new_data = {
-                    "Parent Name": role.Name,
-                    "Object Name": rls.Name,
-                    "Object Type": str(rls.ObjectType),
-                }
-                df = pd.concat(
-                    [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
-                )
-        for tr in tom.model.Cultures:
-            new_data = {
-                "Parent Name": tr.Parent.Name,
-                "Object Name": tr.Name,
-                "Object Type": str(tr.ObjectType),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-        for per in tom.model.Perspectives:
-            new_data = {
-                "Parent Name": per.Parent.Name,
-                "Object Name": per.Name,
-                "Object Type": str(per.ObjectType),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
 @log
 def migration_validation(
     dataset: str,
```
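`list_semantic_model_objects` is not deleted; it moves to `sempy_labs._list_functions` (which accounts for part of that file's +273 lines above), and `_migration_validation.py` now imports it instead. Calling code keeps working the same way; a sketch, with the dataset name as a placeholder:

```python
from sempy_labs._list_functions import list_semantic_model_objects

# Returns a pandas DataFrame with 'Parent Name', 'Object Name' and
# 'Object Type' rows covering tables, columns, measures, hierarchies,
# levels, partitions, relationships, roles, cultures and perspectives.
df = list_semantic_model_objects(dataset="MyModel")  # "MyModel" is hypothetical
print(df["Object Type"].value_counts())
```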
sempy_labs/migration/_refresh_calc_tables.py

```diff
@@ -3,7 +3,7 @@ import sempy.fabric as fabric
 import pandas as pd
 import re, datetime, time
 from pyspark.sql import SparkSession
-from sempy_labs._tom import connect_semantic_model
+from sempy_labs.tom import connect_semantic_model
 from typing import List, Optional, Union
 from sempy._utils._log import log
 import sempy_labs._icons as icons
```
```diff
@@ -118,10 +118,8 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
                 print(
                     f"{icons.green_dot} Calculated table '{tName}' has been refreshed as the '{delta_table_name.lower()}' table in the lakehouse."
                 )
-            except:
-                print(
-                    f"{icons.red_dot} Failed to create calculated table '{tName}' as a delta table in the lakehouse."
-                )
+            except Exception as e:
+                raise ValueError(f"{icons.red_dot} Failed to create calculated table '{tName}' as a delta table in the lakehouse.") from e
 
         except Exception as e:
             if datetime.datetime.now() - start_time > timeout:
```
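The bare `except:` that merely printed is upgraded to an exception that chains the original error via `from e`, so the real cause is preserved in `__cause__` and both tracebacks are reported. The general pattern, as a standalone sketch:

```python
def load_table(name: str) -> list:
    tables = {"sales": [1, 2, 3]}
    try:
        return tables[name]
    except Exception as e:
        # "from e" stores the original KeyError in __cause__, so the
        # traceback shows both the wrapper and the underlying failure.
        raise ValueError(f"Failed to load table '{name}'.") from e

try:
    load_table("orders")
except ValueError as err:
    print(err, "| caused by:", repr(err.__cause__))
```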
sempy_labs/report/__init__.py

```diff
@@ -1,6 +1,6 @@
 from sempy_labs.report._generate_report import (
     create_report_from_reportjson,
-    update_report_from_reportjson,
+    #update_report_from_reportjson,
 )
 from sempy_labs.report._report_functions import (
     get_report_json,
@@ -20,7 +20,7 @@ from sempy_labs.report._report_rebind import (
 
 __all__ = [
     "create_report_from_reportjson",
-    "update_report_from_reportjson",
+    #"update_report_from_reportjson",
     "get_report_json",
     # report_dependency_tree,
     "export_report",
```
sempy_labs/report/_generate_report.py

```diff
@@ -4,13 +4,14 @@ import pandas as pd
 import json, base64, time
 from typing import Optional
 from sempy_labs._helper_functions import resolve_workspace_name_and_id
+import sempy_labs._icons as icons
 
 
 def create_report_from_reportjson(
     report: str,
     dataset: str,
-    report_json: str,
-    theme_json: Optional[str] = None,
+    report_json: dict,
+    theme_json: Optional[dict] = None,
     workspace: Optional[str] = None,
 ):
     """
```
```diff
@@ -22,9 +23,9 @@
         Name of the report.
     dataset : str
         Name of the semantic model to connect to the report.
-    report_json : str
+    report_json : dict
         The report.json file to be used to create the report.
-    theme_json : str, default=None
+    theme_json : dict, default=None
         The theme.json file to be used for the theme of the report.
     workspace : str, default=None
         The Fabric workspace name.
```
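`report_json` and `theme_json` are now annotated as `dict` rather than `str`, so callers should pass parsed JSON instead of raw text. A sketch of the calling convention under that assumption; the file path and names are placeholders:

```python
import json
from sempy_labs.report import create_report_from_reportjson

# Hypothetical location of an exported report definition.
with open("/lakehouse/default/Files/report.json") as f:
    report_json = json.load(f)  # parsed dict, not a JSON string

create_report_from_reportjson(
    report="My Report",      # placeholder report name
    dataset="My Dataset",    # placeholder semantic model name
    report_json=report_json,
)
```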
```diff
@@ -41,7 +42,7 @@
 
     if len(dfI_model) == 0:
         print(
-            f"ERROR: The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
+            f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
        )
         return
 
@@ -52,7 +53,7 @@
 
     if len(dfI_rpt) > 0:
         print(
-            f"WARNING: '{report}' already exists as a report in the '{workspace}' workspace."
+            f"{icons.yellow_dot} '{report}' already exists as a report in the '{workspace}' workspace."
        )
         return
 
@@ -82,7 +83,7 @@
     definitionPBIR = conv_b64(defPBIR)
     payloadReportJson = conv_b64(report_json)
 
-    if theme_json == None:
+    if theme_json is None:
         request_body = {
             "displayName": report,
             "type": objectType,
@@ -132,7 +133,7 @@
     response = client.post(f"/v1/workspaces/{workspace_id}/items", json=request_body)
 
     if response.status_code == 201:
-        print("Report creation succeeded")
+        print(f"{icons.green_dot} Report creation succeeded")
         print(response.json())
     elif response.status_code == 202:
         operationId = response.headers["x-ms-operation-id"]
@@ -143,7 +144,7 @@
         response = client.get(f"/v1/operations/{operationId}")
         response_body = json.loads(response.content)
         response = client.get(f"/v1/operations/{operationId}/result")
-        print("Report creation succeeded")
+        print(f"{icons.green_dot} Report creation succeeded")
         print(response.json())
 
 
@@ -167,13 +168,11 @@
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    objectType = "Report"
-
     dfR = fabric.list_reports(workspace=workspace)
     dfR_filt = dfR[(dfR["Name"] == report) & (dfR["Report Type"] == "PowerBIReport")]
 
     if len(dfR_filt) == 0:
-        print(f"The '{report}' report does not exist in the '{workspace}' workspace.")
+        print(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")
         return
 
     reportId = dfR_filt["Id"].iloc[0]
@@ -215,7 +214,7 @@
 
     request_body = {
         "displayName": report,
-        "type": objectType,
+        "type": 'Report',
         "definition": {
             "parts": [
                 {
@@ -238,7 +237,7 @@
     )
 
     if response.status_code == 201:
-        print(f"The '{report}' report has been successfully updated.")
+        print(f"{icons.green_dot} The '{report}' report has been successfully updated.")
         # print(response.json())
     elif response.status_code == 202:
         operationId = response.headers["x-ms-operation-id"]
@@ -249,5 +248,5 @@
         response = client.get(f"/v1/operations/{operationId}")
         response_body = json.loads(response.content)
         response = client.get(f"/v1/operations/{operationId}/result")
-        print(f"The '{report}' report has been successfully updated.")
+        print(f"{icons.green_dot} The '{report}' report has been successfully updated.")
         # print(response.json())
```
sempy_labs/report/_report_functions.py

```diff
@@ -1,7 +1,7 @@
 import sempy
 import sempy.fabric as fabric
 import pandas as pd
-import json, os, time, base64, copy, re
+import json, os, time, base64, copy
 from anytree import Node, RenderTree
 from powerbiclient import Report
 from synapse.ml.services import Translate
@@ -73,7 +73,7 @@ def get_report_json(
 
     if save_to_file_name is not None:
         lakeAttach = lakehouse_attached()
-        if lakeAttach == False:
+        if lakeAttach is False:
             print(
                 f"{icons.red_dot} In order to save the report.json file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
            )
@@ -107,7 +107,7 @@ def report_dependency_tree(workspace: Optional[str] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    if workspace == None:
+    if workspace is None:
         workspaceId = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspaceId)
 
@@ -190,7 +190,7 @@ def export_report(
 
     lakeAttach = lakehouse_attached()
 
-    if lakeAttach == False:
+    if lakeAttach is False:
         print(
             f"{icons.red_dot} In order to run the 'export_report' function, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
        )
@@ -240,7 +240,7 @@
         )
         return
 
-    if file_name == None:
+    if file_name is None:
         file_name = report + fileExt
     else:
         file_name = file_name + fileExt
@@ -466,7 +466,7 @@ def clone_report(
         return
     target_workspace_id = dfW_filt["Id"].iloc[0]
 
-    if target_dataset == None:
+    if target_dataset is None:
         dfR = fabric.list_reports(workspace=target_workspace)
         dfR_filt = dfR[dfR["Name"] == report]
         target_dataset_id = dfR_filt["Dataset Id"].iloc[0]
@@ -532,7 +532,7 @@ def launch_report(report: str, workspace: Optional[str] = None):
         An embedded Power BI report within the notebook.
     """
 
-    from .HelperFunctions import resolve_report_id
+    from sempy_labs import resolve_report_id
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
@@ -562,7 +562,7 @@ def list_report_pages(report: str, workspace: Optional[str] = None):
         A pandas dataframe showing the pages within a Power BI report and their properties.
     """
 
-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)
 
@@ -626,7 +626,7 @@ def list_report_visuals(report: str, workspace: Optional[str] = None):
         A pandas dataframe showing the visuals within a Power BI report and their properties.
     """
 
-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)
 
@@ -681,7 +681,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):
         A pandas dataframe showing the bookmarks within a Power BI report and their properties.
     """
 
-    if workspace == None:
+    if workspace is None:
         workspace_id = fabric.get_workspace_id()
         workspace = fabric.resolve_workspace_name(workspace_id)
 
@@ -750,6 +750,7 @@ def list_report_bookmarks(report: str, workspace: Optional[str] = None):
         )
 
 
+@log
 def translate_report_titles(
     report: str, languages: Union[str, List[str]], workspace: Optional[str] = None
 ):
```
sempy_labs/report/_report_rebind.py

```diff
@@ -1,11 +1,10 @@
 import sempy
 import sempy.fabric as fabric
 from sempy_labs._helper_functions import resolve_dataset_id, resolve_report_id
-from typing import List, Optional, Union
+from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 
-
 @log
 def report_rebind(
     report: str,
@@ -36,12 +35,12 @@
 
     """
 
-    if report_workspace == None:
+    if report_workspace is None:
         report_workspace_id = fabric.get_workspace_id()
         report_workspace = fabric.resolve_workspace_name(report_workspace_id)
     else:
         report_workspace_id = fabric.resolve_workspace_id(report_workspace)
-    if dataset_workspace == None:
+    if dataset_workspace is None:
         dataset_workspace = report_workspace
 
     client = fabric.PowerBIRestClient()
@@ -104,16 +103,16 @@ def report_rebind_all(
 
     """
 
-    if dataset_workspace == None:
+    if dataset_workspace is None:
         dataset_workspace_id = fabric.get_workspace_id()
         dataset_workspace = fabric.resolve_workspace_name(dataset_workspace_id)
     else:
         dataset_workspace_id = fabric.resolve_workspace_id(dataset_workspace)
 
-    if new_dataset_workpace == None:
+    if new_dataset_workpace is None:
         new_dataset_workpace = dataset_workspace
 
-    if report_workspace == None:
+    if report_workspace is None:
         report_workspace = dataset_workspace
 
     datasetId = resolve_dataset_id(dataset, dataset_workspace)
```
sempy_labs/tom/__init__.py (new file)

```diff
@@ -0,0 +1,6 @@
+from sempy_labs.tom._model import TOMWrapper, connect_semantic_model
+
+__all__ = [
+    "TOMWrapper",
+    "connect_semantic_model"
+]
```
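The new `tom` subpackage exposes `TOMWrapper` and `connect_semantic_model` as its public API. Based on the usage visible in the removed `list_semantic_model_objects` body above, the context-manager pattern looks like this (the dataset name is a placeholder; `readonly=True` mirrors the call seen in the diff):

```python
from sempy_labs.tom import connect_semantic_model

# Open the model read-only and walk its tables via the TOM wrapper.
with connect_semantic_model(dataset="MyModel", readonly=True) as tom:
    for t in tom.model.Tables:
        print(t.Name)
```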