semantic-link-labs 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (113)
  1. semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
  2. semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
  3. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +45 -15
  5. sempy_labs/_ai.py +42 -85
  6. sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
  7. sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
  8. sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
  9. sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
  10. sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
  11. sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
  12. sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
  13. sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
  14. sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
  15. sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
  16. sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
  17. sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
  18. sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
  19. sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
  20. sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
  21. sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
  22. sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
  23. sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
  24. sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
  25. sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
  26. sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
  27. sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
  28. sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
  29. sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
  30. sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
  31. sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
  32. sempy_labs/_clear_cache.py +12 -8
  33. sempy_labs/_connections.py +77 -70
  34. sempy_labs/_dax.py +7 -9
  35. sempy_labs/_generate_semantic_model.py +75 -90
  36. sempy_labs/_helper_functions.py +371 -20
  37. sempy_labs/_icons.py +23 -0
  38. sempy_labs/_list_functions.py +855 -427
  39. sempy_labs/_model_auto_build.py +4 -3
  40. sempy_labs/_model_bpa.py +307 -1118
  41. sempy_labs/_model_bpa_bulk.py +363 -0
  42. sempy_labs/_model_bpa_rules.py +831 -0
  43. sempy_labs/_model_dependencies.py +20 -16
  44. sempy_labs/_one_lake_integration.py +18 -12
  45. sempy_labs/_query_scale_out.py +116 -129
  46. sempy_labs/_refresh_semantic_model.py +23 -10
  47. sempy_labs/_translations.py +367 -288
  48. sempy_labs/_vertipaq.py +152 -123
  49. sempy_labs/directlake/__init__.py +7 -1
  50. sempy_labs/directlake/_directlake_schema_compare.py +33 -30
  51. sempy_labs/directlake/_directlake_schema_sync.py +60 -77
  52. sempy_labs/directlake/_dl_helper.py +233 -0
  53. sempy_labs/directlake/_get_directlake_lakehouse.py +7 -8
  54. sempy_labs/directlake/_get_shared_expression.py +5 -3
  55. sempy_labs/directlake/_guardrails.py +20 -16
  56. sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
  57. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
  58. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
  59. sempy_labs/directlake/_update_directlake_partition_entity.py +169 -22
  60. sempy_labs/directlake/_warm_cache.py +7 -4
  61. sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  62. sempy_labs/lakehouse/_get_lakehouse_tables.py +65 -71
  63. sempy_labs/lakehouse/_lakehouse.py +5 -3
  64. sempy_labs/lakehouse/_shortcuts.py +20 -13
  65. sempy_labs/migration/__init__.py +1 -1
  66. sempy_labs/migration/_create_pqt_file.py +184 -186
  67. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +240 -269
  68. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +78 -77
  69. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +444 -425
  70. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +96 -102
  71. sempy_labs/migration/_migration_validation.py +2 -2
  72. sempy_labs/migration/_refresh_calc_tables.py +94 -100
  73. sempy_labs/report/_BPAReportTemplate.json +232 -0
  74. sempy_labs/report/__init__.py +6 -2
  75. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  76. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  77. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  78. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  79. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  80. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  81. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  82. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  83. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  84. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  85. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  86. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  87. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  88. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  89. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  90. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  91. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  92. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  93. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  94. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  95. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  96. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  97. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  98. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  99. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  100. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  101. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  102. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  103. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  104. sempy_labs/report/_generate_report.py +260 -139
  105. sempy_labs/report/_report_functions.py +90 -59
  106. sempy_labs/report/_report_rebind.py +40 -34
  107. sempy_labs/tom/__init__.py +1 -4
  108. sempy_labs/tom/_model.py +601 -181
  109. semantic_link_labs-0.5.0.dist-info/METADATA +0 -22
  110. semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
  111. sempy_labs/directlake/_fallback.py +0 -58
  112. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
  113. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0

sempy_labs/_clear_cache.py CHANGED
@@ -1,7 +1,6 @@
-import sempy
 import sempy.fabric as fabric
-from ._helper_functions import resolve_dataset_id
-from typing import List, Optional, Union
+from ._helper_functions import resolve_dataset_id, is_default_semantic_model
+from typing import Optional
 import sempy_labs._icons as icons
 
 
@@ -21,14 +20,19 @@ def clear_cache(dataset: str, workspace: Optional[str] = None):
     """
 
     workspace = fabric.resolve_workspace_name(workspace)
+    if is_default_semantic_model(dataset=dataset, workspace=workspace):
+        raise ValueError(
+            f"{icons.red_dot} Cannot run XMLA operations against a default semantic model. Please choose a different semantic model. "
+            "See here for more information: https://learn.microsoft.com/fabric/data-warehouse/semantic-models"
+        )
 
-    datasetID = resolve_dataset_id(dataset=dataset, workspace=workspace)
+    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
 
     xmla = f"""
-            <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
-                <Object>
-                    <DatabaseID>{datasetID}</DatabaseID>
-                </Object>
+            <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
+                <Object>
+                    <DatabaseID>{dataset_id}</DatabaseID>
+                </Object>
             </ClearCache>
             """
     fabric.execute_xmla(dataset=dataset, xmla_command=xmla, workspace=workspace)
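
The new is_default_semantic_model guard means clear_cache now fails fast instead of issuing the XMLA request against a workspace's default semantic model. A minimal usage sketch, assuming clear_cache is exported from the package root (model and workspace names below are hypothetical):

    from sempy_labs import clear_cache

    # Clears the in-memory cache of a user-created semantic model.
    # As of 0.7.0 this raises ValueError (rather than attempting the XMLA
    # call) when the target is the workspace's default semantic model.
    clear_cache(dataset="Sales Model", workspace="Analytics Dev")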

sempy_labs/_connections.py CHANGED
@@ -1,8 +1,6 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-from typing import List, Optional, Union
-import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException
 
 
 def create_connection_cloud(
@@ -56,29 +54,32 @@ def create_connection_cloud(
         },
     }
 
-    response = client.post(f"/v1/connections", json=request_body)
-
-    if response.status_code == 200:
-        o = response.json()
-        new_data = {
-            "Connection Id": o.get("id"),
-            "Connection Name": o.get("name"),
-            "Connectivity Type": o.get("connectivityType"),
-            "Connection Type": o.get("connectionDetails",{}).get("type"),
-            "Connection Path": o.get("connectionDetails",{}).get("path"),
-            "Privacy Level": o.get("privacyLevel"),
-            "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
-            "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
-            "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
-            "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    response = client.post("/v1/connections", json=request_body)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    o = response.json()
+    new_data = {
+        "Connection Id": o.get("id"),
+        "Connection Name": o.get("name"),
+        "Connectivity Type": o.get("connectivityType"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
+        "Privacy Level": o.get("privacyLevel"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-        df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+    df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
 
-        return df
-    else:
-        print(f"{icons.red_dot} {response.status_code}")
+    return df
 
 
 def create_connection_on_prem(
@@ -131,30 +132,33 @@ def create_connection_on_prem(
         },
     }
 
-    response = client.post(f"/v1/connections", json=request_body)
-
-    if response.status_code == 200:
-        o = response.json()
-        new_data = {
-            "Connection Id": o.get("id"),
-            "Connection Name": o.get("name"),
-            "Gateway ID": o.get("gatewayId"),
-            "Connectivity Type": o.get("connectivityType"),
-            "Connection Type": o.get("connectionDetails",{}).get("type"),
-            "Connection Path": o.get("connectionDetails",{}).get("path"),
-            "Privacy Level": o.get("privacyLevel"),
-            "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
-            "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
-            "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
-            "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    response = client.post("/v1/connections", json=request_body)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    o = response.json()
+    new_data = {
+        "Connection Id": o.get("id"),
+        "Connection Name": o.get("name"),
+        "Gateway ID": o.get("gatewayId"),
+        "Connectivity Type": o.get("connectivityType"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
+        "Privacy Level": o.get("privacyLevel"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
-        df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+    df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
 
-        return df
-    else:
-        print(f"{icons.red_dot} {response.status_code}")
+    return df
 
 
 def create_connection_vnet(
@@ -209,27 +213,30 @@ def create_connection_vnet(
         },
     }
 
-    response = client.post(f"/v1/connections", json=request_body)
-
-    if response.status_code == 200:
-        o = response.json()
-        new_data = {
-            "Connection Id": o.get("id"),
-            "Connection Name": o.get("name"),
-            "Gateway ID": o.get("gatewayId"),
-            "Connectivity Type": o.get("connectivityType"),
-            "Connection Type": o.get("connectionDetails",{}).get("type"),
-            "Connection Path": o.get("connectionDetails",{}).get("path"),
-            "Privacy Level": o.get("privacyLevel"),
-            "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
-            "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
-            "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
-            "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-        df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
-
-        return df
-    else:
-        print(f"{icons.red_dot} {response.status_code}")
+    response = client.post("/v1/connections", json=request_body)
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    o = response.json()
+    new_data = {
+        "Connection Id": o.get("id"),
+        "Connection Name": o.get("name"),
+        "Gateway ID": o.get("gatewayId"),
+        "Connectivity Type": o.get("connectivityType"),
+        "Connection Type": o.get("connectionDetails", {}).get("type"),
+        "Connection Path": o.get("connectionDetails", {}).get("path"),
+        "Privacy Level": o.get("privacyLevel"),
+        "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+        "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+        "Connection Encryption": o.get("credentialDetails", {}).get(
+            "connectionEncryption"
+        ),
+        "Skip Test Connection": o.get("credentialDetails", {}).get(
+            "skipTestConnection"
+        ),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+
+    return df
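
All three create_connection_* functions now raise FabricHTTPException on a non-200 response instead of printing the status code and returning None, so callers that previously checked for a None result should catch the exception instead. A minimal sketch of the new pattern, with the request body reduced to an empty placeholder:

    import sempy.fabric as fabric
    from sempy.fabric.exceptions import FabricHTTPException

    client = fabric.FabricRestClient()
    response = client.post("/v1/connections", json={})  # placeholder body
    if response.status_code != 200:
        # 0.5.0 printed the status code and returned None here; 0.7.0 raises.
        raise FabricHTTPException(response)
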
sempy_labs/_dax.py CHANGED
@@ -1,8 +1,10 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs._helper_functions import resolve_dataset_id
-from typing import List, Optional, Union
+from sempy_labs._helper_functions import (
+    resolve_dataset_id,
+    resolve_workspace_name_and_id,
+)
+from typing import Optional
 from sempy._utils._log import log
 
 
@@ -40,17 +42,13 @@ def evaluate_dax_impersonation(
 
     # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/execute-queries-in-group
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
-    else:
-        workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
 
     request_body = {
         "queries": [{"query": dax_query}],
-        "impersonatedUserName": user_name
+        "impersonatedUserName": user_name,
     }
 
     client = fabric.PowerBIRestClient()
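
The inline workspace resolution is replaced by the shared resolve_workspace_name_and_id helper from _helper_functions.py. Its body is not shown in this diff, but judging from the removed lines it is roughly equivalent to the following sketch (the real helper may differ):

    import sempy.fabric as fabric

    def resolve_workspace_name_and_id(workspace):
        # Sketch based on the removed inline code in evaluate_dax_impersonation.
        if workspace is None:
            workspace_id = fabric.get_workspace_id()
            workspace = fabric.resolve_workspace_name(workspace_id)
        else:
            workspace_id = fabric.resolve_workspace_id(workspace)
        return workspace, workspace_id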

sempy_labs/_generate_semantic_model.py CHANGED
@@ -1,11 +1,15 @@
-import sempy
 import sempy.fabric as fabric
 import pandas as pd
-import json, base64, time, os
-from typing import List, Optional, Union
+import json
+import os
+from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_workspace_name_and_id,
+    resolve_dataset_id,
+    _conv_b64,
+    _decode_b64,
+    lro,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
@@ -31,13 +35,14 @@ def create_blank_semantic_model(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    if workspace is None:
-        workspace = fabric.resolve_workspace_name()
+    workspace = fabric.resolve_workspace_name(workspace)
 
     min_compat = 1500
 
     if compatibility_level < min_compat:
-        raise ValueError(f"{icons.red_dot} Compatiblity level must be at least {min_compat}.")
+        raise ValueError(
+            f"{icons.red_dot} Compatiblity level must be at least {min_compat}."
+        )
 
     tmsl = f"""
     {{
@@ -84,30 +89,22 @@ def create_semantic_model_from_bim(
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    objectType = "SemanticModel"
-
-    dfI = fabric.list_items(workspace=workspace, type=objectType)
+    dfI = fabric.list_items(workspace=workspace, type="SemanticModel")
     dfI_filt = dfI[(dfI["Display Name"] == dataset)]
 
     if len(dfI_filt) > 0:
-        raise ValueError(f"{icons.red_dot} '{dataset}' already exists as a semantic model in the '{workspace}' workspace.")
+        raise ValueError(
+            f"{icons.red_dot} '{dataset}' already exists as a semantic model in the '{workspace}' workspace."
+        )
 
     client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
 
-    def conv_b64(file):
-
-        loadJson = json.dumps(file)
-        f = base64.b64encode(loadJson.encode("utf-8")).decode("utf-8")
-
-        return f
-
-    payloadPBIDefinition = conv_b64(defPBIDataset)
-    payloadBim = conv_b64(bim_file)
+    payloadPBIDefinition = _conv_b64(defPBIDataset)
+    payloadBim = _conv_b64(bim_file)
 
     request_body = {
         "displayName": dataset,
-        "type": objectType,
         "definition": {
             "parts": [
                 {
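
The inline conv_b64 helper is dropped in favor of _conv_b64 imported from _helper_functions.py. Its implementation is not part of this diff, but based on the deleted inline version it presumably amounts to:

    import base64
    import json

    def _conv_b64(obj):
        # JSON-serialize the payload and base64-encode it for the item-definition API.
        return base64.b64encode(json.dumps(obj).encode("utf-8")).decode("utf-8")
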
@@ -124,83 +121,82 @@ def create_semantic_model_from_bim(
         },
     }
 
-    response = client.post(f"/v1/workspaces/{workspace_id}/items", json=request_body)
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/semanticModels",
+        json=request_body,
+    )
 
-    if response.status_code == 201:
-        print(
-            f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
-        )
-        print(response.json())
-    elif response.status_code == 202:
-        operationId = response.headers["x-ms-operation-id"]
-        response = client.get(f"/v1/operations/{operationId}")
-        response_body = json.loads(response.content)
-        while response_body["status"] != "Succeeded":
-            time.sleep(3)
-            response = client.get(f"/v1/operations/{operationId}")
-            response_body = json.loads(response.content)
-        response = client.get(f"/v1/operations/{operationId}/result")
-        print(
-            f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
-        )
-        print(response.json())
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
+    )
 
 
 def deploy_semantic_model(
-    dataset: str,
-    new_dataset: Optional[str] = None,
-    workspace: Optional[str] = None,
-    new_dataset_workspace: Optional[str] = None,
+    source_dataset: str,
+    source_workspace: Optional[str] = None,
+    target_dataset: Optional[str] = None,
+    target_workspace: Optional[str] = None,
+    refresh_target_dataset: Optional[bool] = True,
 ):
     """
     Deploys a semantic model based on an existing semantic model.
 
     Parameters
     ----------
-    dataset : str
+    source_dataset : str
         Name of the semantic model to deploy.
-    new_dataset: str
-        Name of the new semantic model to be created.
-    workspace : str, default=None
+    source_workspace : str, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    new_dataset_workspace : str, default=None
+    target_dataset: str
+        Name of the new semantic model to be created.
+    target_workspace : str, default=None
         The Fabric workspace name in which the new semantic model will be deployed.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    refresh_target_dataset : bool, default=True
+        If set to True, this will initiate a full refresh of the target semantic model in the target workspace.
 
     Returns
     -------
 
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    from sempy_labs import refresh_semantic_model
 
-    if new_dataset_workspace is None:
-        new_dataset_workspace = workspace
+    source_workspace = fabric.resolve_workspace_name(source_workspace)
 
-    if new_dataset is None:
-        new_dataset = dataset
+    if target_workspace is None:
+        target_workspace = source_workspace
 
-    if new_dataset == dataset and new_dataset_workspace == workspace:
-        print(
-            f"The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' parameters have the same value. At least one of these must be different. Please update the parameters."
+    if target_dataset is None:
+        target_dataset = source_dataset
+
+    if target_dataset == source_dataset and target_workspace == source_workspace:
+        raise ValueError(
+            f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' "
+            f"parameters have the same value. At least one of these must be different. Please update the parameters."
         )
-        return
 
-    bim = get_semantic_model_bim(dataset=dataset, workspace=workspace)
+    bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace)
 
     create_semantic_model_from_bim(
-        dataset=new_dataset, bim_file=bim, workspace=new_dataset_workspace
+        dataset=target_dataset, bim_file=bim, workspace=target_workspace
     )
 
+    if refresh_target_dataset:
+        refresh_semantic_model(dataset=target_dataset, workspace=target_workspace)
+
 
 def get_semantic_model_bim(
     dataset: str,
     workspace: Optional[str] = None,
     save_to_file_name: Optional[str] = None,
-):
+    lakehouse_workspace: Optional[str] = None,
+) -> dict:
     """
     Extracts the Model.bim file for a given semantic model.
 
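
The long-running-operation handling that was previously inlined (accept an immediate success, otherwise poll /v1/operations/{operationId} until the status is Succeeded, then fetch /result) is now centralized in the lro helper imported from _helper_functions.py. A rough sketch of such a helper, based only on the removed polling code (the actual implementation is not shown in this diff):

    import json
    import time

    def lro(client, response, status_codes=(200, 202)):
        # Terminal success: return the response as-is.
        if response.status_code == status_codes[0]:
            return response
        # Otherwise poll the operation until it reports "Succeeded".
        operation_id = response.headers["x-ms-operation-id"]
        body = json.loads(client.get(f"/v1/operations/{operation_id}").content)
        while body["status"] != "Succeeded":
            time.sleep(3)
            body = json.loads(client.get(f"/v1/operations/{operation_id}").content)
        return client.get(f"/v1/operations/{operation_id}/result")
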
@@ -209,66 +205,55 @@ def get_semantic_model_bim(
     dataset : str
         Name of the semantic model.
     workspace : str, default=None
-        The Fabric workspace name.
+        The Fabric workspace name in which the semantic model resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     save_to_file_name : str, default=None
         If specified, saves the Model.bim as a file in the lakehouse attached to the notebook.
+    lakehouse_workspace : str, default=None
+        The Fabric workspace name in which the lakehouse attached to the workspace resides.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
 
     Returns
     -------
-    str
+    dict
         The Model.bim file for the semantic model.
     """
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
-    objType = "SemanticModel"
+    fmt = "TMSL"
     client = fabric.FabricRestClient()
-    itemList = fabric.list_items(workspace=workspace, type=objType)
-    itemListFilt = itemList[(itemList["Display Name"] == dataset)]
-    itemId = itemListFilt["Id"].iloc[0]
+    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
     response = client.post(
-        f"/v1/workspaces/{workspace_id}/items/{itemId}/getDefinition"
+        f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format={fmt}",
     )
-
-    if response.status_code == 200:
-        res = response.json()
-    elif response.status_code == 202:
-        operationId = response.headers["x-ms-operation-id"]
-        response = client.get(f"/v1/operations/{operationId}")
-        response_body = json.loads(response.content)
-        while response_body["status"] != "Succeeded":
-            time.sleep(3)
-            response = client.get(f"/v1/operations/{operationId}")
-            response_body = json.loads(response.content)
-        response = client.get(f"/v1/operations/{operationId}/result")
-        res = response.json()
-    df_items = pd.json_normalize(res["definition"]["parts"])
+    result = lro(client, response).json()
+    df_items = pd.json_normalize(result["definition"]["parts"])
     df_items_filt = df_items[df_items["path"] == "model.bim"]
     payload = df_items_filt["payload"].iloc[0]
-    bimFile = base64.b64decode(payload).decode("utf-8")
+    bimFile = _decode_b64(payload)
     bimJson = json.loads(bimFile)
 
     if save_to_file_name is not None:
-        lakeAttach = lakehouse_attached()
-        if lakeAttach is False:
-            print(
-                f"In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+        if not lakehouse_attached():
+            raise ValueError(
+                f"{icons.red_dot} In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
             )
-            return
 
         lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
+        lake_workspace = fabric.resolve_workspace_name()
+        lakehouse = resolve_lakehouse_name(lakehouse_id, lake_workspace)
         folderPath = "/lakehouse/default/Files"
         fileExt = ".bim"
         if not save_to_file_name.endswith(fileExt):
-            save_to_file_name = save_to_file_name + fileExt
+            save_to_file_name = f"{save_to_file_name}{fileExt}"
         filePath = os.path.join(folderPath, save_to_file_name)
         with open(filePath, "w") as json_file:
            json.dump(bimJson, json_file, indent=4)
         print(
-            f"The .bim file for the '{dataset}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
+            f"{icons.green_dot} The .bim file for the '{dataset}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
         )
 
     return bimJson
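
Taken together, the renamed deploy_semantic_model parameters and the updated get_semantic_model_bim make the deployment flow read roughly as follows. This is a usage sketch assuming both functions are exposed at the package root (per the __init__.py changes); model and workspace names are hypothetical:

    from sempy_labs import deploy_semantic_model, get_semantic_model_bim

    # Copy a model to another workspace and fully refresh the copy (the 0.7.0 default).
    deploy_semantic_model(
        source_dataset="Sales Model",
        source_workspace="Dev",
        target_dataset="Sales Model",
        target_workspace="Prod",
        refresh_target_dataset=True,
    )

    # get_semantic_model_bim now returns a dict (previously documented as str) and can
    # save the definition to the attached lakehouse; ".bim" is appended if missing.
    bim = get_semantic_model_bim(
        dataset="Sales Model",
        workspace="Dev",
        save_to_file_name="SalesModel",
    )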