semantic-link-labs 0.6.0__py3-none-any.whl → 0.7.1__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.

Potentially problematic release. This version of semantic-link-labs might be problematic; see the registry's advisory for more details.

Files changed (104)
  1. semantic_link_labs-0.7.1.dist-info/METADATA +148 -0
  2. semantic_link_labs-0.7.1.dist-info/RECORD +111 -0
  3. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +26 -2
  5. sempy_labs/_ai.py +3 -65
  6. sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
  7. sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
  8. sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
  9. sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
  10. sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
  11. sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
  12. sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
  13. sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
  14. sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
  15. sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
  16. sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
  17. sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
  18. sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
  19. sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
  20. sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
  21. sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
  22. sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
  23. sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
  24. sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
  25. sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
  26. sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
  27. sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
  28. sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
  29. sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
  30. sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
  31. sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
  32. sempy_labs/_clear_cache.py +9 -4
  33. sempy_labs/_generate_semantic_model.py +30 -56
  34. sempy_labs/_helper_functions.py +361 -14
  35. sempy_labs/_icons.py +10 -1
  36. sempy_labs/_list_functions.py +539 -260
  37. sempy_labs/_model_bpa.py +194 -18
  38. sempy_labs/_model_bpa_bulk.py +367 -0
  39. sempy_labs/_model_bpa_rules.py +19 -8
  40. sempy_labs/_model_dependencies.py +12 -10
  41. sempy_labs/_one_lake_integration.py +7 -7
  42. sempy_labs/_query_scale_out.py +61 -96
  43. sempy_labs/_refresh_semantic_model.py +7 -0
  44. sempy_labs/_translations.py +154 -1
  45. sempy_labs/_vertipaq.py +103 -90
  46. sempy_labs/directlake/__init__.py +5 -1
  47. sempy_labs/directlake/_directlake_schema_compare.py +27 -31
  48. sempy_labs/directlake/_directlake_schema_sync.py +55 -66
  49. sempy_labs/directlake/_dl_helper.py +233 -0
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +6 -7
  51. sempy_labs/directlake/_get_shared_expression.py +1 -1
  52. sempy_labs/directlake/_guardrails.py +17 -13
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +54 -30
  54. sempy_labs/directlake/_warm_cache.py +1 -1
  55. sempy_labs/lakehouse/__init__.py +2 -0
  56. sempy_labs/lakehouse/_get_lakehouse_tables.py +61 -69
  57. sempy_labs/lakehouse/_lakehouse.py +66 -9
  58. sempy_labs/lakehouse/_shortcuts.py +1 -1
  59. sempy_labs/migration/_create_pqt_file.py +174 -182
  60. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +236 -268
  61. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +75 -73
  62. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +442 -426
  63. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +91 -97
  64. sempy_labs/migration/_refresh_calc_tables.py +92 -101
  65. sempy_labs/report/_BPAReportTemplate.json +232 -0
  66. sempy_labs/report/__init__.py +6 -2
  67. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  68. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  69. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  70. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  71. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  72. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  73. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  74. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  75. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  76. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  77. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  78. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  79. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  80. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  81. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  82. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  83. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  84. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  85. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  86. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  87. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  88. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  89. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  90. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  91. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  92. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  93. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  94. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  95. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  96. sempy_labs/report/_generate_report.py +255 -139
  97. sempy_labs/report/_report_functions.py +26 -33
  98. sempy_labs/report/_report_rebind.py +31 -26
  99. sempy_labs/tom/_model.py +75 -58
  100. semantic_link_labs-0.6.0.dist-info/METADATA +0 -22
  101. semantic_link_labs-0.6.0.dist-info/RECORD +0 -54
  102. sempy_labs/directlake/_fallback.py +0 -60
  103. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/LICENSE +0 -0
  104. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/top_level.txt +0 -0
@@ -2,13 +2,12 @@ import sempy
 import sempy.fabric as fabric
 import pandas as pd
 import re
-import datetime
-import time
 from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
 from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     resolve_lakehouse_id,
     create_abfss_path,
+    retry,
 )
 from sempy_labs.tom import connect_semantic_model
 from pyspark.sql import SparkSession
@@ -101,168 +100,142 @@ def migrate_calc_tables_to_lakehouse(
         )
         return
 
-    start_time = datetime.datetime.now()
-    timeout = datetime.timedelta(minutes=1)
-    success = False
-
-    while not success:
-        try:
-            with connect_semantic_model(
-                dataset=dataset, workspace=workspace, readonly=True
-            ) as tom:
-                success = True
-                for t in tom.model.Tables:
-                    if tom.is_auto_date_table(table_name=t.Name):
-                        print(
-                            f"{icons.yellow_dot} The '{t.Name}' table is an auto-datetime table and is not supported in the Direct Lake migration process. "
-                            "Please create a proper Date/Calendar table in your lakehouse and use it in your Direct Lake model."
-                        )
-                    else:
-                        for p in t.Partitions:
-                            if str(p.SourceType) == "Calculated":
-                                query = p.Source.Expression
-                                if "NAMEOF" not in query:  # exclude field parameters
-                                    daxQuery = ""
-                                    if query.lower().startswith("calendar") and any(
-                                        str(c.Type) == "Calculated" for c in t.Columns
-                                    ):
-                                        daxQuery = f"ADDCOLUMNS(\n{query},"
-                                        for c in t.Columns:
-                                            if str(c.Type) == "Calculated":
-                                                expr = c.Expression
-                                                expr = expr.replace(
-                                                    f"'{t.Name}'", ""
-                                                ).replace(f"{t.Name}[Date]", "[Date]")
-                                                expr = expr.replace(
-                                                    "[MonthNo]", "MONTH([Date])"
-                                                ).replace(
-                                                    "[QuarterNo]",
-                                                    "INT((MONTH([Date]) + 2) / 3)",
-                                                )
-                                                daxQuery = (
-                                                    f'{daxQuery}\n"{c.Name}",{expr},'
-                                                )
-                                        daxQuery = (
-                                            "EVALUATE\n" + daxQuery.rstrip(",") + "\n)"
+    with connect_semantic_model(
+        dataset=dataset, workspace=workspace, readonly=True
+    ) as tom:
+        for t in tom.model.Tables:
+            if tom.is_auto_date_table(table_name=t.Name):
+                print(
+                    f"{icons.yellow_dot} The '{t.Name}' table is an auto-datetime table and is not supported in the Direct Lake migration process. "
+                    "Please create a proper Date/Calendar table in your lakehouse and use it in your Direct Lake model."
+                )
+            else:
+                for p in t.Partitions:
+                    if str(p.SourceType) == "Calculated":
+                        query = p.Source.Expression
+                        if "NAMEOF" not in query:  # exclude field parameters
+                            daxQuery = ""
+                            if query.lower().startswith("calendar") and any(
+                                str(c.Type) == "Calculated" for c in t.Columns
+                            ):
+                                daxQuery = f"ADDCOLUMNS(\n{query},"
+                                for c in t.Columns:
+                                    if str(c.Type) == "Calculated":
+                                        expr = c.Expression
+                                        expr = expr.replace(f"'{t.Name}'", "").replace(
+                                            f"{t.Name}[Date]", "[Date]"
                                         )
-                                    else:
-                                        daxQuery = f"EVALUATE\n{query}"
-                                    daxQueryTopN = (
-                                        daxQuery.replace(
-                                            "EVALUATE\n", "EVALUATE\nTOPN(1,"
+                                        expr = expr.replace(
+                                            "[MonthNo]", "MONTH([Date])"
+                                        ).replace(
+                                            "[QuarterNo]",
+                                            "INT((MONTH([Date]) + 2) / 3)",
                                         )
-                                        + ")"
+                                        daxQuery = f'{daxQuery}\n"{c.Name}",{expr},'
+                                daxQuery = "EVALUATE\n" + daxQuery.rstrip(",") + "\n)"
+                            else:
+                                daxQuery = f"EVALUATE\n{query}"
+                            daxQueryTopN = (
+                                daxQuery.replace("EVALUATE\n", "EVALUATE\nTOPN(1,")
+                                + ")"
+                            )
+
+                            try:
+                                df = fabric.evaluate_dax(
+                                    dataset=dataset,
+                                    dax_string=daxQueryTopN,
+                                    workspace=workspace,
+                                )
+
+                                for col in df.columns:
+                                    pattern = r"\[([^\]]+)\]"
+
+                                    matches = re.findall(pattern, col)
+                                    new_column_name = matches[0].replace(" ", "")
+
+                                    df.rename(
+                                        columns={col: new_column_name},
+                                        inplace=True,
                                     )
 
                                     try:
-                                        df = fabric.evaluate_dax(
-                                            dataset=dataset,
-                                            dax_string=daxQueryTopN,
-                                            workspace=workspace,
-                                        )
-
-                                        for col in df.columns:
-                                            pattern = r"\[([^\]]+)\]"
-
-                                            matches = re.findall(pattern, col)
-                                            new_column_name = matches[0].replace(
-                                                " ", ""
-                                            )
-
-                                            df.rename(
-                                                columns={col: new_column_name},
-                                                inplace=True,
-                                            )
-
-                                            try:
-                                                dataType = next(
-                                                    str(c.DataType)
-                                                    for c in tom.model.Tables[
-                                                        t.Name
-                                                    ].Columns
-                                                    if str(c.Type)
-                                                    == "CalculatedTableColumn"
-                                                    and c.SourceColumn == col
-                                                )
-                                            except Exception:
-                                                dataType = next(
-                                                    str(c.DataType)
-                                                    for c in tom.model.Tables[
-                                                        t.Name
-                                                    ].Columns
-                                                    if str(c.Type) == "Calculated"
-                                                    and c.Name == new_column_name
-                                                )
-
-                                            if dataType == "Int64":
-                                                df[new_column_name] = df[
-                                                    new_column_name
-                                                ].astype(int)
-                                            elif dataType in ["Decimal", "Double"]:
-                                                df[new_column_name] = df[
-                                                    new_column_name
-                                                ].astype(float)
-                                            elif dataType == "Boolean":
-                                                df[new_column_name] = df[
-                                                    new_column_name
-                                                ].astype(bool)
-                                            elif dataType == "DateTime":
-                                                df[new_column_name] = pd.to_datetime(
-                                                    df[new_column_name]
-                                                )
-
-                                        delta_table_name = t.Name.replace(
-                                            " ", "_"
-                                        ).lower()
-
-                                        spark_df = spark.createDataFrame(df)
-                                        filePath = create_abfss_path(
-                                            lakehouse_id=lakehouse_id,
-                                            lakehouse_workspace_id=lakehouse_workspace_id,
-                                            delta_table_name=delta_table_name,
-                                        )
-                                        spark_df.write.mode("overwrite").format(
-                                            "delta"
-                                        ).save(filePath)
-
-                                        start_time2 = datetime.datetime.now()
-                                        timeout2 = datetime.timedelta(minutes=1)
-                                        success2 = False
-
-                                        while not success2:
-                                            try:
-                                                with connect_semantic_model(
-                                                    dataset=new_dataset,
-                                                    readonly=False,
-                                                    workspace=new_dataset_workspace,
-                                                ) as tom2:
-                                                    success2 = True
-                                                    tom2.set_annotation(
-                                                        object=tom2.model,
-                                                        name=t.Name,
-                                                        value=daxQuery,
-                                                    )
-                                            except Exception:
-                                                if (
-                                                    datetime.datetime.now()
-                                                    - start_time2
-                                                    > timeout2
-                                                ):
-                                                    break
-                                                time.sleep(1)
-
-                                        print(
-                                            f"{icons.green_dot} Calculated table '{t.Name}' has been created as delta table '{delta_table_name.lower()}' "
-                                            f"in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
+                                        dataType = next(
+                                            str(c.DataType)
+                                            for c in tom.model.Tables[t.Name].Columns
+                                            if str(c.Type) == "CalculatedTableColumn"
+                                            and c.SourceColumn == col
                                         )
                                     except Exception:
-                                        print(
-                                            f"{icons.red_dot} Failed to create calculated table '{t.Name}' as a delta table in the lakehouse."
+                                        dataType = next(
+                                            str(c.DataType)
+                                            for c in tom.model.Tables[t.Name].Columns
+                                            if str(c.Type) == "Calculated"
+                                            and c.Name == new_column_name
+                                        )
+
+                                    if dataType == "Int64":
+                                        df[new_column_name] = df[
+                                            new_column_name
+                                        ].astype(int)
+                                    elif dataType in ["Decimal", "Double"]:
+                                        df[new_column_name] = df[
+                                            new_column_name
+                                        ].astype(float)
+                                    elif dataType == "Boolean":
+                                        df[new_column_name] = df[
+                                            new_column_name
+                                        ].astype(bool)
+                                    elif dataType == "DateTime":
+                                        df[new_column_name] = pd.to_datetime(
+                                            df[new_column_name]
                                         )
-                        except Exception:
-                            if datetime.datetime.now() - start_time > timeout:
-                                break
-                            time.sleep(1)
+
+                                delta_table_name = t.Name.replace(" ", "_").lower()
+
+                                spark_df = spark.createDataFrame(df)
+                                filePath = create_abfss_path(
+                                    lakehouse_id=lakehouse_id,
+                                    lakehouse_workspace_id=lakehouse_workspace_id,
+                                    delta_table_name=delta_table_name,
+                                )
+                                spark_df.write.mode("overwrite").format("delta").save(
+                                    filePath
+                                )
+
+                                @retry(
+                                    sleep_time=1,
+                                    timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
+                                )
+                                def dyn_connect():
+                                    with connect_semantic_model(
+                                        dataset=new_dataset,
+                                        readonly=True,
+                                        workspace=new_dataset_workspace,
+                                    ) as tom2:

+                                        tom2.model
+
+                                dyn_connect()
+
+                                with connect_semantic_model(
+                                    dataset=new_dataset,
+                                    readonly=False,
+                                    workspace=new_dataset_workspace,
+                                ) as tom2:
+                                    tom2.set_annotation(
+                                        object=tom2.model,
+                                        name=t.Name,
+                                        value=daxQuery,
+                                    )
+
+                                print(
+                                    f"{icons.green_dot} Calculated table '{t.Name}' has been created as delta table '{delta_table_name.lower()}' "
+                                    f"in the '{lakehouse}' lakehouse within the '{lakehouse_workspace}' workspace."
+                                )
+                            except Exception:
+                                print(
+                                    f"{icons.red_dot} Failed to create calculated table '{t.Name}' as a delta table in the lakehouse."
+                                )
 
 
 @log
@@ -318,115 +291,110 @@ def migrate_field_parameters(
         )
         return
 
-    start_time = datetime.datetime.now()
-    timeout = datetime.timedelta(minutes=1)
-    success = False
-
-    while not success:
-        try:
-            with connect_semantic_model(
-                dataset=new_dataset, workspace=new_dataset_workspace, readonly=False
-            ) as tom:
-                success = True
-
-                for i, r in dfP_filt.iterrows():
-                    tName = r["Table Name"]
-                    query = r["Query"]
-
-                    # For field parameters, remove calc columns from the query
-                    rows = query.strip().split("\n")
-                    filtered_rows = [
-                        row
-                        for row in rows
-                        if not any(
-                            value in row
-                            for value in dfC_CalcColumn["Column Object"].values
-                        )
-                    ]
-                    updated_query_string = "\n".join(filtered_rows)
-
-                    # Remove extra comma
-                    lines = updated_query_string.strip().split("\n")
-                    lines[-2] = lines[-2].rstrip(",")
-                    expr = "\n".join(lines)
-
-                    try:
-                        par = TOM.Partition()
-                        par.Name = tName
-
-                        parSource = TOM.CalculatedPartitionSource()
-                        par.Source = parSource
-                        parSource.Expression = expr
-
-                        tbl = TOM.Table()
-                        tbl.Name = tName
-                        tbl.Partitions.Add(par)
-
-                        columns = ["Value1", "Value2", "Value3"]
-
-                        for colName in columns:
-                            col = TOM.CalculatedTableColumn()
-                            col.Name = colName
-                            col.SourceColumn = "[" + colName + "]"
-                            col.DataType = TOM.DataType.String
-
-                            tbl.Columns.Add(col)
-
-                        tom.model.Tables.Add(tbl)
-
-                        ep = TOM.JsonExtendedProperty()
-                        ep.Name = "ParameterMetadata"
-                        ep.Value = '{"version":3,"kind":2}'
-
-                        rcd = TOM.RelatedColumnDetails()
-                        gpc = TOM.GroupByColumn()
-                        gpc.GroupingColumn = tom.model.Tables[tName].Columns["Value2"]
-                        rcd.GroupByColumns.Add(gpc)
-
-                        # Update column properties
-                        tom.model.Tables[tName].Columns["Value2"].IsHidden = True
-                        tom.model.Tables[tName].Columns["Value3"].IsHidden = True
-                        tom.model.Tables[tName].Columns[
-                            "Value3"
-                        ].DataType = TOM.DataType.Int64
-                        tom.model.Tables[tName].Columns["Value1"].SortByColumn = (
-                            tom.model.Tables[tName].Columns["Value3"]
-                        )
-                        tom.model.Tables[tName].Columns["Value2"].SortByColumn = (
-                            tom.model.Tables[tName].Columns["Value3"]
-                        )
-                        tom.model.Tables[tName].Columns[
-                            "Value2"
-                        ].ExtendedProperties.Add(ep)
-                        tom.model.Tables[tName].Columns[
-                            "Value1"
-                        ].RelatedColumnDetails = rcd
-
-                        dfC_filt1 = dfC[
-                            (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value1]")
-                        ]
-                        col1 = dfC_filt1["Column Name"].iloc[0]
-                        dfC_filt2 = dfC[
-                            (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value2]")
-                        ]
-                        col2 = dfC_filt2["Column Name"].iloc[0]
-                        dfC_filt3 = dfC[
-                            (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value3]")
-                        ]
-                        col3 = dfC_filt3["Column Name"].iloc[0]
-
-                        tom.model.Tables[tName].Columns["Value1"].Name = col1
-                        tom.model.Tables[tName].Columns["Value2"].Name = col2
-                        tom.model.Tables[tName].Columns["Value3"].Name = col3
-
-                        print(
-                            f"{icons.green_dot} The '{tName}' table has been added as a field parameter to the '{new_dataset}' semantic model in the '{new_dataset_workspace}' workspace."
-                        )
-                    except Exception:
-                        print(
-                            f"{icons.red_dot} The '{tName}' table has not been added as a field parameter."
-                        )
-        except Exception:
-            if datetime.datetime.now() - start_time > timeout:
-                break
-            time.sleep(1)
+    @retry(
+        sleep_time=1,
+        timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
+    )
+    def dyn_connect():
+        with connect_semantic_model(
+            dataset=new_dataset, readonly=True, workspace=new_dataset_workspace
+        ) as tom:
+
+            tom.model
+
+    dyn_connect()
+
+    with connect_semantic_model(
+        dataset=new_dataset, workspace=new_dataset_workspace, readonly=False
+    ) as tom:
+
+        for i, r in dfP_filt.iterrows():
+            tName = r["Table Name"]
+            query = r["Query"]
+
+            # For field parameters, remove calc columns from the query
+            rows = query.strip().split("\n")
+            filtered_rows = [
+                row
+                for row in rows
+                if not any(
+                    value in row for value in dfC_CalcColumn["Column Object"].values
+                )
+            ]
+            updated_query_string = "\n".join(filtered_rows)
+
+            # Remove extra comma
+            lines = updated_query_string.strip().split("\n")
+            lines[-2] = lines[-2].rstrip(",")
+            expr = "\n".join(lines)
+
+            try:
+                par = TOM.Partition()
+                par.Name = tName
+
+                parSource = TOM.CalculatedPartitionSource()
+                par.Source = parSource
+                parSource.Expression = expr
+
+                tbl = TOM.Table()
+                tbl.Name = tName
+                tbl.Partitions.Add(par)
+
+                columns = ["Value1", "Value2", "Value3"]
+
+                for colName in columns:
+                    col = TOM.CalculatedTableColumn()
+                    col.Name = colName
+                    col.SourceColumn = "[" + colName + "]"
+                    col.DataType = TOM.DataType.String
+
+                    tbl.Columns.Add(col)
+
+                tom.model.Tables.Add(tbl)
+
+                ep = TOM.JsonExtendedProperty()
+                ep.Name = "ParameterMetadata"
+                ep.Value = '{"version":3,"kind":2}'
+
+                rcd = TOM.RelatedColumnDetails()
+                gpc = TOM.GroupByColumn()
+                gpc.GroupingColumn = tom.model.Tables[tName].Columns["Value2"]
+                rcd.GroupByColumns.Add(gpc)
+
+                # Update column properties
+                tom.model.Tables[tName].Columns["Value2"].IsHidden = True
+                tom.model.Tables[tName].Columns["Value3"].IsHidden = True
+                tom.model.Tables[tName].Columns["Value3"].DataType = TOM.DataType.Int64
+                tom.model.Tables[tName].Columns["Value1"].SortByColumn = (
+                    tom.model.Tables[tName].Columns["Value3"]
+                )
+                tom.model.Tables[tName].Columns["Value2"].SortByColumn = (
+                    tom.model.Tables[tName].Columns["Value3"]
+                )
+                tom.model.Tables[tName].Columns["Value2"].ExtendedProperties.Add(ep)
+                tom.model.Tables[tName].Columns["Value1"].RelatedColumnDetails = rcd
+
+                dfC_filt1 = dfC[
+                    (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value1]")
+                ]
+                col1 = dfC_filt1["Column Name"].iloc[0]
+                dfC_filt2 = dfC[
+                    (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value2]")
+                ]
+                col2 = dfC_filt2["Column Name"].iloc[0]
+                dfC_filt3 = dfC[
+                    (dfC["Table Name"] == tName) & (dfC["Source"] == "[Value3]")
+                ]
+                col3 = dfC_filt3["Column Name"].iloc[0]
+
+                tom.model.Tables[tName].Columns["Value1"].Name = col1
+                tom.model.Tables[tName].Columns["Value2"].Name = col2
+                tom.model.Tables[tName].Columns["Value3"].Name = col3
+
+                print(
+                    f"{icons.green_dot} The '{tName}' table has been added as a field parameter to the '{new_dataset}' semantic model in the '{new_dataset_workspace}' workspace."
+                )
+            except Exception:
+                print(
+                    f"{icons.red_dot} The '{tName}' table has not been added as a field parameter."
+                )
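
Both hunks above replace the 0.6.0 pattern of hand-rolled polling loops (start_time/timeout bookkeeping with datetime and time.sleep wrapped around connect_semantic_model) with the retry helper now imported from sempy_labs._helper_functions, plus a small read-only probe function, dyn_connect. The helper's implementation is not included in this diff; the following is a minimal sketch of a decorator compatible with the call sites shown above (parameters sleep_time and timeout_error_message, with the one-minute limit implied by the message text), not the library's actual code:

    import time
    from functools import wraps

    def retry(sleep_time, timeout_error_message, timeout_minutes=1):
        # Hypothetical reconstruction: only sleep_time and timeout_error_message
        # appear at the call sites in this diff; timeout_minutes is assumed from
        # the "timed out after 1 minute" error message.
        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                deadline = time.monotonic() + timeout_minutes * 60
                while True:
                    try:
                        return func(*args, **kwargs)
                    except Exception:
                        # Keep retrying until the deadline, then surface the
                        # configured timeout message.
                        if time.monotonic() >= deadline:
                            raise TimeoutError(timeout_error_message)
                        time.sleep(sleep_time)
            return wrapper
        return decorator

Under this scheme, the 0.7.1 code first retries a cheap read-only connection (dyn_connect merely touches tom2.model) until the freshly created semantic model is reachable, and only then opens a single read-write connect_semantic_model block to do the real work, instead of wrapping the entire function body in a retry loop as 0.6.0 did.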