semantic-link-labs 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of semantic-link-labs might be problematic.

Files changed (113)
  1. semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
  2. semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
  3. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +45 -15
  5. sempy_labs/_ai.py +42 -85
  6. sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
  7. sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
  8. sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
  9. sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
  10. sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
  11. sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
  12. sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
  13. sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
  14. sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
  15. sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
  16. sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
  17. sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
  18. sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
  19. sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
  20. sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
  21. sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
  22. sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
  23. sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
  24. sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
  25. sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
  26. sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
  27. sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
  28. sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
  29. sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
  30. sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
  31. sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
  32. sempy_labs/_clear_cache.py +12 -8
  33. sempy_labs/_connections.py +77 -70
  34. sempy_labs/_dax.py +7 -9
  35. sempy_labs/_generate_semantic_model.py +75 -90
  36. sempy_labs/_helper_functions.py +371 -20
  37. sempy_labs/_icons.py +23 -0
  38. sempy_labs/_list_functions.py +855 -427
  39. sempy_labs/_model_auto_build.py +4 -3
  40. sempy_labs/_model_bpa.py +307 -1118
  41. sempy_labs/_model_bpa_bulk.py +363 -0
  42. sempy_labs/_model_bpa_rules.py +831 -0
  43. sempy_labs/_model_dependencies.py +20 -16
  44. sempy_labs/_one_lake_integration.py +18 -12
  45. sempy_labs/_query_scale_out.py +116 -129
  46. sempy_labs/_refresh_semantic_model.py +23 -10
  47. sempy_labs/_translations.py +367 -288
  48. sempy_labs/_vertipaq.py +152 -123
  49. sempy_labs/directlake/__init__.py +7 -1
  50. sempy_labs/directlake/_directlake_schema_compare.py +33 -30
  51. sempy_labs/directlake/_directlake_schema_sync.py +60 -77
  52. sempy_labs/directlake/_dl_helper.py +233 -0
  53. sempy_labs/directlake/_get_directlake_lakehouse.py +7 -8
  54. sempy_labs/directlake/_get_shared_expression.py +5 -3
  55. sempy_labs/directlake/_guardrails.py +20 -16
  56. sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
  57. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
  58. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
  59. sempy_labs/directlake/_update_directlake_partition_entity.py +169 -22
  60. sempy_labs/directlake/_warm_cache.py +7 -4
  61. sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  62. sempy_labs/lakehouse/_get_lakehouse_tables.py +65 -71
  63. sempy_labs/lakehouse/_lakehouse.py +5 -3
  64. sempy_labs/lakehouse/_shortcuts.py +20 -13
  65. sempy_labs/migration/__init__.py +1 -1
  66. sempy_labs/migration/_create_pqt_file.py +184 -186
  67. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +240 -269
  68. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +78 -77
  69. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +444 -425
  70. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +96 -102
  71. sempy_labs/migration/_migration_validation.py +2 -2
  72. sempy_labs/migration/_refresh_calc_tables.py +94 -100
  73. sempy_labs/report/_BPAReportTemplate.json +232 -0
  74. sempy_labs/report/__init__.py +6 -2
  75. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  76. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  77. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  78. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  79. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  80. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  81. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  82. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  83. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  84. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  85. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  86. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  87. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  88. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  89. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  90. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  91. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  92. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  93. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  94. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  95. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  96. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  97. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  98. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  99. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  100. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  101. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  102. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  103. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  104. sempy_labs/report/_generate_report.py +260 -139
  105. sempy_labs/report/_report_functions.py +90 -59
  106. sempy_labs/report/_report_rebind.py +40 -34
  107. sempy_labs/tom/__init__.py +1 -4
  108. sempy_labs/tom/_model.py +601 -181
  109. semantic_link_labs-0.5.0.dist-info/METADATA +0 -22
  110. semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
  111. sempy_labs/directlake/_fallback.py +0 -58
  112. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
  113. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
@@ -1,16 +1,17 @@
  import sempy
  import sempy.fabric as fabric
  import pandas as pd
- import re, datetime, time
+ import re
  from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
  from sempy_labs._helper_functions import (
      resolve_lakehouse_name,
      resolve_lakehouse_id,
      create_abfss_path,
+     retry,
  )
  from sempy_labs.tom import connect_semantic_model
  from pyspark.sql import SparkSession
- from typing import List, Optional, Union
+ from typing import Optional
  from sempy._utils._log import log
  import sempy_labs._icons as icons

@@ -25,7 +26,8 @@ def migrate_calc_tables_to_lakehouse(
  lakehouse_workspace: Optional[str] = None,
  ):
  """
- Creates delta tables in your lakehouse based on the DAX expression of a calculated table in an import/DirectQuery semantic model. The DAX expression encapsulating the calculated table logic is stored in the new Direct Lake semantic model as model annotations.
+ Creates delta tables in your lakehouse based on the DAX expression of a calculated table in an import/DirectQuery semantic model.
+ The DAX expression encapsulating the calculated table logic is stored in the new Direct Lake semantic model as model annotations.

  Parameters
  ----------
@@ -67,8 +69,6 @@ def migrate_calc_tables_to_lakehouse(
  else:
  lakehouse_id = resolve_lakehouse_id(lakehouse, lakehouse_workspace)

- dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
- # dfC['Column Object'] = "'" + dfC['Table Name'] + "'[" + dfC['Column Name'] + "]"
  dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
  dfP_filt = dfP[(dfP["Source Type"] == "Calculated")]
  dfP_filt = dfP_filt[
@@ -100,166 +100,142 @@ def migrate_calc_tables_to_lakehouse(
  )
  return

- start_time = datetime.datetime.now()
- timeout = datetime.timedelta(minutes=1)
- success = False
-
- while not success:
- try:
- with connect_semantic_model(
- dataset=dataset, workspace=workspace, readonly=True
- ) as tom:
- success = True
- for t in tom.model.Tables:
- if tom.is_auto_date_table(table_name=t.Name):
- print(
- f"{icons.yellow_dot} The '{t.Name}' table is an auto-datetime table and is not supported in the Direct Lake migration process. Please create a proper Date/Calendar table in your lakehoues and use it in your Direct Lake model."
- )
- else:
- for p in t.Partitions:
- if str(p.SourceType) == "Calculated":
- query = p.Source.Expression
- if "NAMEOF" not in query: # exclude field parameters
- daxQuery = ""
- if query.lower().startswith("calendar") and any(
- str(c.Type) == "Calculated" for c in t.Columns
- ):
- daxQuery = f"ADDCOLUMNS(\n{query},"
- for c in t.Columns:
- if str(c.Type) == "Calculated":
- expr = c.Expression
- expr = expr.replace(
- f"'{t.Name}'", ""
- ).replace(f"{t.Name}[Date]", "[Date]")
- expr = expr.replace(
- "[MonthNo]", "MONTH([Date])"
- ).replace(
- "[QuarterNo]",
- "INT((MONTH([Date]) + 2) / 3)",
- )
- daxQuery = (
- f'{daxQuery}\n"{c.Name}",{expr},'
- )
- daxQuery = (
- "EVALUATE\n" + daxQuery.rstrip(",") + "\n)"
+ with connect_semantic_model(
+ dataset=dataset, workspace=workspace, readonly=True
+ ) as tom:
+ for t in tom.model.Tables:
+ if tom.is_auto_date_table(table_name=t.Name):
+ print(
+ f"{icons.yellow_dot} The '{t.Name}' table is an auto-datetime table and is not supported in the Direct Lake migration process. "
+ "Please create a proper Date/Calendar table in your lakehoues and use it in your Direct Lake model."
+ )
+ else:
+ for p in t.Partitions:
+ if str(p.SourceType) == "Calculated":
+ query = p.Source.Expression
+ if "NAMEOF" not in query: # exclude field parameters
+ daxQuery = ""
+ if query.lower().startswith("calendar") and any(
+ str(c.Type) == "Calculated" for c in t.Columns
+ ):
+ daxQuery = f"ADDCOLUMNS(\n{query},"
+ for c in t.Columns:
+ if str(c.Type) == "Calculated":
+ expr = c.Expression
+ expr = expr.replace(f"'{t.Name}'", "").replace(
+ f"{t.Name}[Date]", "[Date]"
  )
- else:
- daxQuery = f"EVALUATE\n{query}"
- daxQueryTopN = (
- daxQuery.replace(
- "EVALUATE\n", "EVALUATE\nTOPN(1,"
+ expr = expr.replace(
+ "[MonthNo]", "MONTH([Date])"
+ ).replace(
+ "[QuarterNo]",
+ "INT((MONTH([Date]) + 2) / 3)",
  )
- + ")"
+ daxQuery = f'{daxQuery}\n"{c.Name}",{expr},'
+ daxQuery = "EVALUATE\n" + daxQuery.rstrip(",") + "\n)"
+ else:
+ daxQuery = f"EVALUATE\n{query}"
+ daxQueryTopN = (
+ daxQuery.replace("EVALUATE\n", "EVALUATE\nTOPN(1,")
+ + ")"
+ )
+
+ try:
+ df = fabric.evaluate_dax(
+ dataset=dataset,
+ dax_string=daxQueryTopN,
+ workspace=workspace,
+ )
+
+ for col in df.columns:
+ pattern = r"\[([^\]]+)\]"
+
+ matches = re.findall(pattern, col)
+ new_column_name = matches[0].replace(" ", "")
+
+ df.rename(
+ columns={col: new_column_name},
+ inplace=True,
  )

  try:
- df = fabric.evaluate_dax(
- dataset=dataset,
- dax_string=daxQueryTopN,
- workspace=workspace,
- )
-
- for col in df.columns:
- pattern = r"\[([^\]]+)\]"
-
- matches = re.findall(pattern, col)
- new_column_name = matches[0].replace(
- " ", ""
- )
-
- df.rename(
- columns={col: new_column_name},
- inplace=True,
- )
-
- try:
- dataType = next(
- str(c.DataType)
- for c in tom.model.Tables[
- t.Name
- ].Columns
- if str(c.Type)
- == "CalculatedTableColumn"
- and c.SourceColumn == col
- )
- except:
- dataType = next(
- str(c.DataType)
- for c in tom.model.Tables[
- t.Name
- ].Columns
- if str(c.Type) == "Calculated"
- and c.Name == new_column_name
- )
-
- if dataType == "Int64":
- df[new_column_name] = df[
- new_column_name
- ].astype(int)
- elif dataType in ["Decimal", "Double"]:
- df[new_column_name] = df[
- new_column_name
- ].astype(float)
- elif dataType == "Boolean":
- df[new_column_name] = df[
- new_column_name
- ].astype(bool)
- elif dataType == "DateTime":
- df[new_column_name] = pd.to_datetime(
- df[new_column_name]
- )
-
- delta_table_name = t.Name.replace(
- " ", "_"
- ).lower()
-
- spark_df = spark.createDataFrame(df)
- filePath = create_abfss_path(
- lakehouse_id=lakehouse_id,
- lakehouse_workspace_id=lakehouse_workspace_id,
- delta_table_name=delta_table_name,
+ dataType = next(
+ str(c.DataType)
+ for c in tom.model.Tables[t.Name].Columns
+ if str(c.Type) == "CalculatedTableColumn"
+ and c.SourceColumn == col
  )
- spark_df.write.mode("overwrite").format(
- "delta"
- ).save(filePath)
-
- start_time2 = datetime.datetime.now()
- timeout2 = datetime.timedelta(minutes=1)
- success2 = False
-
- while not success2:
- try:
- with connect_semantic_model(
- dataset=new_dataset,
- readonly=False,
- workspace=new_dataset_workspace,
- ) as tom2:
- success2 = True
- tom2.set_annotation(
- object=tom2.model,
- name=t.Name,
- value=daxQuery,
- )
- except Exception as e:
- if (
- datetime.datetime.now()
- - start_time2
- > timeout2
- ):
- break
- time.sleep(1)
-
- print(
- f"{icons.green_dot} Calculated table '{t.Name}' has been created as delta table '{delta_table_name.lower()}' in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
+ except Exception:
+ dataType = next(
+ str(c.DataType)
+ for c in tom.model.Tables[t.Name].Columns
+ if str(c.Type) == "Calculated"
+ and c.Name == new_column_name
  )
- except:
- print(
- f"{icons.red_dot} Failed to create calculated table '{t.Name}' as a delta table in the lakehouse."
+
+ if dataType == "Int64":
+ df[new_column_name] = df[
+ new_column_name
+ ].astype(int)
+ elif dataType in ["Decimal", "Double"]:
+ df[new_column_name] = df[
+ new_column_name
+ ].astype(float)
+ elif dataType == "Boolean":
+ df[new_column_name] = df[
+ new_column_name
+ ].astype(bool)
+ elif dataType == "DateTime":
+ df[new_column_name] = pd.to_datetime(
+ df[new_column_name]
  )
- except Exception as e:
- if datetime.datetime.now() - start_time > timeout:
- break
- time.sleep(1)
+
+ delta_table_name = t.Name.replace(" ", "_").lower()
+
+ spark_df = spark.createDataFrame(df)
+ filePath = create_abfss_path(
+ lakehouse_id=lakehouse_id,
+ lakehouse_workspace_id=lakehouse_workspace_id,
+ delta_table_name=delta_table_name,
+ )
+ spark_df.write.mode("overwrite").format("delta").save(
+ filePath
+ )
+
+ @retry(
+ sleep_time=1,
+ timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
+ )
+ def dyn_connect():
+ with connect_semantic_model(
+ dataset=new_dataset,
+ readonly=True,
+ workspace=new_dataset_workspace,
+ ) as tom2:
+
+ tom2.model
+
+ dyn_connect()
+
+ with connect_semantic_model(
+ dataset=new_dataset,
+ readonly=False,
+ workspace=new_dataset_workspace,
+ ) as tom2:
+ tom2.set_annotation(
+ object=tom2.model,
+ name=t.Name,
+ value=daxQuery,
+ )
+
+ print(
+ f"{icons.green_dot} Calculated table '{t.Name}' has been created as delta table '{delta_table_name.lower()}' "
+ f"in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
+ )
+ except Exception:
+ print(
+ f"{icons.red_dot} Failed to create calculated table '{t.Name}' as a delta table in the lakehouse."
+ )


  @log
@@ -315,115 +291,110 @@ def migrate_field_parameters(
  )
  return

- start_time = datetime.datetime.now()
- timeout = datetime.timedelta(minutes=1)
- success = False
-
- while not success:
- try:
- with connect_semantic_model(
- dataset=new_dataset, workspace=new_dataset_workspace, readonly=False
- ) as tom:
- success = True
-
- for i, r in dfP_filt.iterrows():
- tName = r["Table Name"]
- query = r["Query"]
-
- # For field parameters, remove calc columns from the query
- rows = query.strip().split("\n")
- filtered_rows = [
- row
- for row in rows
- if not any(
- value in row
- for value in dfC_CalcColumn["Column Object"].values
- )
- ]
- updated_query_string = "\n".join(filtered_rows)
-
- # Remove extra comma
- lines = updated_query_string.strip().split("\n")
- lines[-2] = lines[-2].rstrip(",")
- expr = "\n".join(lines)
-
- try:
- par = TOM.Partition()
- par.Name = tName
-
- parSource = TOM.CalculatedPartitionSource()
- par.Source = parSource
- parSource.Expression = expr
-
- tbl = TOM.Table()
- tbl.Name = tName
- tbl.Partitions.Add(par)
-
- columns = ["Value1", "Value2", "Value3"]
-
- for colName in columns:
- col = TOM.CalculatedTableColumn()
- col.Name = colName
- col.SourceColumn = "[" + colName + "]"
- col.DataType = TOM.DataType.String
-
- tbl.Columns.Add(col)
-
- tom.model.Tables.Add(tbl)
-
- ep = TOM.JsonExtendedProperty()
- ep.Name = "ParameterMetadata"
- ep.Value = '{"version":3,"kind":2}'
-
- rcd = TOM.RelatedColumnDetails()
- gpc = TOM.GroupByColumn()
- gpc.GroupingColumn = tom.model.Tables[tName].Columns["Value2"]
- rcd.GroupByColumns.Add(gpc)
-
- # Update column properties
- tom.model.Tables[tName].Columns["Value2"].IsHidden = True
- tom.model.Tables[tName].Columns["Value3"].IsHidden = True
- tom.model.Tables[tName].Columns[
- "Value3"
- ].DataType = TOM.DataType.Int64
- tom.model.Tables[tName].Columns["Value1"].SortByColumn = (
- tom.model.Tables[tName].Columns["Value3"]
- )
- tom.model.Tables[tName].Columns["Value2"].SortByColumn = (
- tom.model.Tables[tName].Columns["Value3"]
- )
- tom.model.Tables[tName].Columns[
- "Value2"
- ].ExtendedProperties.Add(ep)
- tom.model.Tables[tName].Columns[
- "Value1"
- ].RelatedColumnDetails = rcd
-
- dfC_filt1 = dfC[
- (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value1]")
- ]
- col1 = dfC_filt1["Column Name"].iloc[0]
- dfC_filt2 = dfC[
- (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value2]")
- ]
- col2 = dfC_filt2["Column Name"].iloc[0]
- dfC_filt3 = dfC[
- (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value3]")
- ]
- col3 = dfC_filt3["Column Name"].iloc[0]
-
- tom.model.Tables[tName].Columns["Value1"].Name = col1
- tom.model.Tables[tName].Columns["Value2"].Name = col2
- tom.model.Tables[tName].Columns["Value3"].Name = col3
-
- print(
- f"{icons.green_dot} The '{tName}' table has been added as a field parameter to the '{new_dataset}' semantic model in the '{new_dataset_workspace}' workspace."
- )
- except:
- print(
- f"{icons.red_dot} The '{tName}' table has not been added as a field parameter."
- )
- except Exception as e:
- if datetime.datetime.now() - start_time > timeout:
- break
- time.sleep(1)
+ @retry(
+ sleep_time=1,
+ timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
+ )
+ def dyn_connect():
+ with connect_semantic_model(
+ dataset=new_dataset, readonly=True, workspace=new_dataset_workspace
+ ) as tom:
+
+ tom.model
+
+ dyn_connect()
+
+ with connect_semantic_model(
+ dataset=new_dataset, workspace=new_dataset_workspace, readonly=False
+ ) as tom:
+
+ for i, r in dfP_filt.iterrows():
+ tName = r["Table Name"]
+ query = r["Query"]
+
+ # For field parameters, remove calc columns from the query
+ rows = query.strip().split("\n")
+ filtered_rows = [
+ row
+ for row in rows
+ if not any(
+ value in row for value in dfC_CalcColumn["Column Object"].values
+ )
+ ]
+ updated_query_string = "\n".join(filtered_rows)
+
+ # Remove extra comma
+ lines = updated_query_string.strip().split("\n")
+ lines[-2] = lines[-2].rstrip(",")
+ expr = "\n".join(lines)
+
+ try:
+ par = TOM.Partition()
+ par.Name = tName
+
+ parSource = TOM.CalculatedPartitionSource()
+ par.Source = parSource
+ parSource.Expression = expr
+
+ tbl = TOM.Table()
+ tbl.Name = tName
+ tbl.Partitions.Add(par)
+
+ columns = ["Value1", "Value2", "Value3"]
+
+ for colName in columns:
+ col = TOM.CalculatedTableColumn()
+ col.Name = colName
+ col.SourceColumn = "[" + colName + "]"
+ col.DataType = TOM.DataType.String
+
+ tbl.Columns.Add(col)
+
+ tom.model.Tables.Add(tbl)
+
+ ep = TOM.JsonExtendedProperty()
+ ep.Name = "ParameterMetadata"
+ ep.Value = '{"version":3,"kind":2}'
+
+ rcd = TOM.RelatedColumnDetails()
+ gpc = TOM.GroupByColumn()
+ gpc.GroupingColumn = tom.model.Tables[tName].Columns["Value2"]
+ rcd.GroupByColumns.Add(gpc)
+
+ # Update column properties
+ tom.model.Tables[tName].Columns["Value2"].IsHidden = True
+ tom.model.Tables[tName].Columns["Value3"].IsHidden = True
+ tom.model.Tables[tName].Columns["Value3"].DataType = TOM.DataType.Int64
+ tom.model.Tables[tName].Columns["Value1"].SortByColumn = (
+ tom.model.Tables[tName].Columns["Value3"]
+ )
+ tom.model.Tables[tName].Columns["Value2"].SortByColumn = (
+ tom.model.Tables[tName].Columns["Value3"]
+ )
+ tom.model.Tables[tName].Columns["Value2"].ExtendedProperties.Add(ep)
+ tom.model.Tables[tName].Columns["Value1"].RelatedColumnDetails = rcd
+
+ dfC_filt1 = dfC[
+ (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value1]")
+ ]
+ col1 = dfC_filt1["Column Name"].iloc[0]
+ dfC_filt2 = dfC[
+ (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value2]")
+ ]
+ col2 = dfC_filt2["Column Name"].iloc[0]
+ dfC_filt3 = dfC[
+ (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value3]")
+ ]
+ col3 = dfC_filt3["Column Name"].iloc[0]
+
+ tom.model.Tables[tName].Columns["Value1"].Name = col1
+ tom.model.Tables[tName].Columns["Value2"].Name = col2
+ tom.model.Tables[tName].Columns["Value3"].Name = col3
+
+ print(
+ f"{icons.green_dot} The '{tName}' table has been added as a field parameter to the '{new_dataset}' semantic model in the '{new_dataset_workspace}' workspace."
+ )
+ except Exception:
+ print(
+ f"{icons.red_dot} The '{tName}' table has not been added as a field parameter."
+ )
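
Note: the recurring change in both hunks above is the removal of hand-rolled one-minute retry loops (start_time/timeout/success with time.sleep) in favor of a @retry decorator imported from sempy_labs._helper_functions, which is applied to a small dyn_connect() probe before the real write connection is opened. That helper's implementation is not part of this diff; the following is only a minimal sketch of what a decorator with the observed signature (sleep_time, timeout_error_message) might look like, with the one-minute budget assumed from the old loops.

import datetime
import time
from functools import wraps


def retry(sleep_time: int = 1, timeout_error_message: str = "Function timed out"):
    # Hypothetical sketch, not the actual sempy_labs helper.
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Keep retrying until the call succeeds or the budget is spent.
            deadline = datetime.datetime.now() + datetime.timedelta(minutes=1)
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception:
                    if datetime.datetime.now() > deadline:
                        raise TimeoutError(timeout_error_message)
                    time.sleep(sleep_time)

        return wrapper

    return decorator

Under that assumption, decorating dyn_connect() reproduces the old behavior (poll the new semantic model until it is reachable, give up after roughly a minute) while keeping the migration logic itself outside the retry loop.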