semantic-link-labs 0.6.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (103)
  1. semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
  2. semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
  3. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +26 -2
  5. sempy_labs/_ai.py +3 -65
  6. sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
  7. sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
  8. sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
  9. sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
  10. sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
  11. sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
  12. sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
  13. sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
  14. sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
  15. sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
  16. sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
  17. sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
  18. sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
  19. sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
  20. sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
  21. sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
  22. sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
  23. sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
  24. sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
  25. sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
  26. sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
  27. sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
  28. sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
  29. sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
  30. sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
  31. sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
  32. sempy_labs/_clear_cache.py +9 -4
  33. sempy_labs/_generate_semantic_model.py +30 -56
  34. sempy_labs/_helper_functions.py +358 -14
  35. sempy_labs/_icons.py +10 -1
  36. sempy_labs/_list_functions.py +478 -237
  37. sempy_labs/_model_bpa.py +194 -18
  38. sempy_labs/_model_bpa_bulk.py +363 -0
  39. sempy_labs/_model_bpa_rules.py +4 -4
  40. sempy_labs/_model_dependencies.py +12 -10
  41. sempy_labs/_one_lake_integration.py +7 -7
  42. sempy_labs/_query_scale_out.py +45 -66
  43. sempy_labs/_refresh_semantic_model.py +7 -0
  44. sempy_labs/_translations.py +154 -1
  45. sempy_labs/_vertipaq.py +103 -90
  46. sempy_labs/directlake/__init__.py +5 -1
  47. sempy_labs/directlake/_directlake_schema_compare.py +27 -31
  48. sempy_labs/directlake/_directlake_schema_sync.py +55 -66
  49. sempy_labs/directlake/_dl_helper.py +233 -0
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +6 -7
  51. sempy_labs/directlake/_get_shared_expression.py +1 -1
  52. sempy_labs/directlake/_guardrails.py +17 -13
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +54 -30
  54. sempy_labs/directlake/_warm_cache.py +1 -1
  55. sempy_labs/lakehouse/_get_lakehouse_tables.py +61 -69
  56. sempy_labs/lakehouse/_lakehouse.py +3 -2
  57. sempy_labs/lakehouse/_shortcuts.py +1 -1
  58. sempy_labs/migration/_create_pqt_file.py +174 -182
  59. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +236 -268
  60. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +75 -73
  61. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +442 -426
  62. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +91 -97
  63. sempy_labs/migration/_refresh_calc_tables.py +92 -101
  64. sempy_labs/report/_BPAReportTemplate.json +232 -0
  65. sempy_labs/report/__init__.py +6 -2
  66. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  67. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  68. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  69. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  70. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  71. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  72. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  73. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  74. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  75. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  76. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  77. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  78. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  79. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  80. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  81. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  82. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  83. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  84. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  85. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  86. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  87. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  88. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  89. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  90. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  91. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  92. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  93. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  94. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  95. sempy_labs/report/_generate_report.py +255 -139
  96. sempy_labs/report/_report_functions.py +26 -33
  97. sempy_labs/report/_report_rebind.py +31 -26
  98. sempy_labs/tom/_model.py +75 -58
  99. semantic_link_labs-0.6.0.dist-info/METADATA +0 -22
  100. semantic_link_labs-0.6.0.dist-info/RECORD +0 -54
  101. sempy_labs/directlake/_fallback.py +0 -60
  102. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
  103. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py

@@ -4,7 +4,7 @@ import datetime
 import time
 from sempy_labs._list_functions import list_tables
 from sempy_labs.directlake._get_shared_expression import get_shared_expression
-from sempy_labs._helper_functions import resolve_lakehouse_name
+from sempy_labs._helper_functions import resolve_lakehouse_name, retry
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from sempy_labs.tom import connect_semantic_model
 from typing import Optional
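
The only functional change in this hunk is the new `retry` import from sempy_labs._helper_functions (a module that grew by 358 lines in this release, per the file list above); both rewritten functions below lean on it to replace hand-rolled retry loops. The real implementation is not shown in this diff, but a minimal sketch that would satisfy the call sites below — assuming a hard one-minute cap, to match the timeout message used at those call sites — might look like:

import time
from functools import wraps


def retry(sleep_time: int = 1, timeout_error_message: str = "Function timed out"):
    # Hypothetical sketch only; the shipped decorator lives in
    # sempy_labs._helper_functions and its internals may differ.
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            deadline = time.monotonic() + 60  # assumed 1-minute cap, per the message
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception:
                    if time.monotonic() >= deadline:
                        raise TimeoutError(timeout_error_message)
                    time.sleep(sleep_time)

        return wrapper

    return decorator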
@@ -61,105 +61,99 @@ def migrate_tables_columns_to_semantic_model(
     lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)

     # Check that lakehouse is attached to the notebook
-    lakeAttach = lakehouse_attached()
-
-    # Run if lakehouse is attached to the notebook or a lakehouse & lakehouse workspace are specified
-    if lakeAttach or (lakehouse is not None and lakehouse_workspace is not None):
-        shEx = get_shared_expression(lakehouse, lakehouse_workspace)
-
-        dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
-        dfT = list_tables(dataset, workspace)
-        dfT.rename(columns={"Type": "Table Type"}, inplace=True)
-        dfC = pd.merge(
-            dfC,
-            dfT[["Name", "Table Type"]],
-            left_on="Table Name",
-            right_on="Name",
-            how="left",
-        )
-        dfT_filt = dfT[dfT["Table Type"] == "Table"]
-        dfC_filt = dfC[
-            (dfC["Table Type"] == "Table")
-            & ~(dfC["Column Name"].str.startswith("RowNumber-"))
-            & (dfC["Type"] != "Calculated")
-        ]
-
-        print(f"{icons.in_progress} Updating '{new_dataset}' based on '{dataset}'...")
-        start_time = datetime.datetime.now()
-        timeout = datetime.timedelta(minutes=1)
-        success = False
-
-        while not success:
-            try:
-                with connect_semantic_model(
-                    dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
-                ) as tom:
-                    success = True
-                    if not any(
-                        e.Name == "DatabaseQuery" for e in tom.model.Expressions
-                    ):
-                        tom.add_expression("DatabaseQuery", expression=shEx)
-                        print(
-                            f"{icons.green_dot} The 'DatabaseQuery' expression has been added."
-                        )
-
-                    for i, r in dfT_filt.iterrows():
-                        tName = r["Name"]
-                        tDC = r["Data Category"]
-                        tHid = bool(r["Hidden"])
-                        tDesc = r["Description"]
-
-                        if not any(t.Name == tName for t in tom.model.Tables):
-                            tom.add_table(
-                                name=tName,
-                                description=tDesc,
-                                data_category=tDC,
-                                hidden=tHid,
-                            )
-                            tom.add_entity_partition(
-                                table_name=tName, entity_name=tName.replace(" ", "_")
-                            )
-                            print(
-                                f"{icons.green_dot} The '{tName}' table has been added."
-                            )
-
-                    for i, r in dfC_filt.iterrows():
-                        tName = r["Table Name"]
-                        cName = r["Column Name"]
-                        scName = r["Source"].replace(" ", "_")
-                        cHid = bool(r["Hidden"])
-                        cDataType = r["Data Type"]
-
-                        if not any(
-                            c.Name == cName and c.Parent.Name == tName
-                            for c in tom.all_columns()
-                        ):
-                            tom.add_data_column(
-                                table_name=tName,
-                                column_name=cName,
-                                source_column=scName,
-                                hidden=cHid,
-                                data_type=cDataType,
-                            )
-                            print(
-                                f"{icons.green_dot} The '{tName}'[{cName}] column has been added."
-                            )
-
-                    print(
-                        f"\n{icons.green_dot} All regular tables and columns have been added to the '{new_dataset}' semantic model."
-                    )
-            except Exception:
-                if datetime.datetime.now() - start_time > timeout:
-                    break
-                time.sleep(1)
-    else:
-        print(
+    if not lakehouse_attached() and (lakehouse is None and lakehouse_workspace is None):
+        raise ValueError(
             f"{icons.red_dot} Lakehouse not attached to notebook and lakehouse/lakehouse_workspace are not specified. Please add your lakehouse to this notebook"
             f" or specify the lakehouse/lakehouse_workspace parameters."
-        )
-        print(
             "To attach a lakehouse to a notebook, go to the the 'Explorer' window to the left, click 'Lakehouses' to add your lakehouse to this notebook"
+            "\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse"
         )
+    shEx = get_shared_expression(lakehouse, lakehouse_workspace)
+
+    dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
+    dfT = list_tables(dataset, workspace)
+    dfT.rename(columns={"Type": "Table Type"}, inplace=True)
+    dfC = pd.merge(
+        dfC,
+        dfT[["Name", "Table Type"]],
+        left_on="Table Name",
+        right_on="Name",
+        how="left",
+    )
+    dfT_filt = dfT[dfT["Table Type"] == "Table"]
+    dfC_filt = dfC[
+        (dfC["Table Type"] == "Table")
+        & ~(dfC["Column Name"].str.startswith("RowNumber-"))
+        & (dfC["Type"] != "Calculated")
+    ]
+
+    print(f"{icons.in_progress} Updating '{new_dataset}' based on '{dataset}'...")
+
+    @retry(
+        sleep_time=1,
+        timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
+    )
+    def dyn_connect():
+        with connect_semantic_model(
+            dataset=new_dataset, readonly=True, workspace=new_dataset_workspace
+        ) as tom:
+
+            tom.model
+
+    dyn_connect()
+
+    with connect_semantic_model(
+        dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
+    ) as tom:
+        if not any(e.Name == "DatabaseQuery" for e in tom.model.Expressions):
+            tom.add_expression("DatabaseQuery", expression=shEx)
+            print(f"{icons.green_dot} The 'DatabaseQuery' expression has been added.")
+
+        for i, r in dfT_filt.iterrows():
+            tName = r["Name"]
+            tDC = r["Data Category"]
+            tHid = bool(r["Hidden"])
+            tDesc = r["Description"]
+            ent_name = tName.replace(" ", "_")
+            for char in icons.special_characters:
+                ent_name = ent_name.replace(char, "")
+
+            if not any(t.Name == tName for t in tom.model.Tables):
+                tom.add_table(
+                    name=tName,
+                    description=tDesc,
+                    data_category=tDC,
+                    hidden=tHid,
+                )
+                tom.add_entity_partition(table_name=tName, entity_name=ent_name)
+                print(f"{icons.green_dot} The '{tName}' table has been added.")
+
+        for i, r in dfC_filt.iterrows():
+            tName = r["Table Name"]
+            cName = r["Column Name"]
+            scName = r["Source"].replace(" ", "_")
+            cHid = bool(r["Hidden"])
+            cDataType = r["Data Type"]
+            for char in icons.special_characters:
+                scName = scName.replace(char, "")
+
+            if scName.endswith("_"):
+                scName = scName[:-1]
+
+            if not any(
+                c.Name == cName and c.Parent.Name == tName for c in tom.all_columns()
+            ):
+                tom.add_data_column(
+                    table_name=tName,
+                    column_name=cName,
+                    source_column=scName,
+                    hidden=cHid,
+                    data_type=cDataType,
+                )
+                print(
+                    f"{icons.green_dot} The '{tName}'[{cName}] column has been added."
+                )
+
     print(
-        "\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse"
+        f"\n{icons.green_dot} All regular tables and columns have been added to the '{new_dataset}' semantic model."
    )
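
Besides swapping the hand-rolled while/try retry loop for the shared `retry` decorator (and failing fast with a ValueError when no lakehouse is available), this hunk adds name sanitization: entity names and source column names now strip every character in `icons.special_characters` (defined in sempy_labs/_icons.py, which gained 10 lines above), and source columns additionally drop a trailing underscore. In isolation, with a hypothetical stand-in for the character list, the new logic behaves like:

# Hypothetical stand-in; the real list lives in sempy_labs/_icons.py.
special_characters = ["#", "@", "$", "%", "&"]


def sanitize_entity_name(table_name: str) -> str:
    # Mirrors the hunk: spaces become underscores, special characters are dropped.
    ent_name = table_name.replace(" ", "_")
    for char in special_characters:
        ent_name = ent_name.replace(char, "")
    return ent_name


def sanitize_source_column(source: str) -> str:
    # Source columns also drop a single trailing underscore left by sanitization.
    sc_name = source.replace(" ", "_")
    for char in special_characters:
        sc_name = sc_name.replace(char, "")
    if sc_name.endswith("_"):
        sc_name = sc_name[:-1]
    return sc_name


print(sanitize_entity_name("Sales Order #"))   # Sales_Order_
print(sanitize_source_column("Sales Order #"))  # Sales_Order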
@@ -1,8 +1,7 @@
1
1
  import sempy.fabric as fabric
2
2
  import pandas as pd
3
3
  import re
4
- import datetime
5
- import time
4
+ from sempy_labs._helper_functions import retry
6
5
  from pyspark.sql import SparkSession
7
6
  from sempy_labs.tom import connect_semantic_model
8
7
  from typing import Optional
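
This import hunk mirrors the one above: the manual datetime/time bookkeeping disappears in favor of the shared retry helper. Both rewritten functions now use the same connect-probe pattern: a small @retry-decorated dyn_connect() opens a read-only connection and touches tom.model purely to verify the semantic model is reachable, and only after that probe succeeds does the real connection open. Distilled (dataset and workspace are placeholders, and this is only meaningful inside a Fabric notebook where the model exists):

from sempy_labs._helper_functions import retry
from sempy_labs.tom import connect_semantic_model

dataset = "MyDataset"      # placeholder
workspace = "MyWorkspace"  # placeholder


@retry(
    sleep_time=1,
    timeout_error_message="Function timed out after 1 minute",
)
def dyn_connect():
    # Probe: raises (and is retried) until the model is actually reachable.
    with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:
        tom.model


dyn_connect()  # blocks, retrying once per second, for up to a minute

# Only then do the real work over a fresh connection.
with connect_semantic_model(dataset=dataset, readonly=True, workspace=workspace) as tom:
    for annotation in tom.model.Annotations:
        ...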
@@ -26,107 +25,99 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
     """

     spark = SparkSession.builder.getOrCreate()
-
-    start_time = datetime.datetime.now()
-    timeout = datetime.timedelta(minutes=1)
-    success = False
-
     workspace = fabric.resolve_workspace_name(workspace)

-    while not success:
-        try:
-            with connect_semantic_model(
-                dataset=dataset, readonly=True, workspace=workspace
-            ) as tom:
-                success = True
-                for a in tom.model.Annotations:
-                    if any(a.Name == t.Name for t in tom.model.Tables):
-                        tName = a.Name
-                        query = a.Value
-
-                        if not query.startswith("EVALUATE"):
-                            daxquery = "EVALUATE \n" + query
-                        else:
-                            daxquery = query
-
-                        try:
-                            df = fabric.evaluate_dax(
-                                dataset=dataset,
-                                dax_string=daxquery,
-                                workspace=workspace,
-                            )
-
-                            # Update column names for non-field parameters
-                            if query.find("NAMEOF") == -1:
-                                for old_column_name in df.columns:
-                                    pattern = r"\[([^\]]+)\]"
-
-                                    matches = re.findall(pattern, old_column_name)
-                                    new_column_name = matches[0]
-                                    new_column_name = new_column_name.replace(" ", "")
-
-                                    df.rename(
-                                        columns={old_column_name: new_column_name},
-                                        inplace=True,
-                                    )
-
-                                    # Update data types for lakehouse columns
-                                    dataType = next(
-                                        str(c.DataType)
-                                        for c in tom.all_columns()
-                                        if c.Parent.Name == tName
-                                        and c.SourceColumn == new_column_name
-                                    )
-                                    # dfC_type = dfC[(dfC['Table Name'] == tName) & (dfC['Source'] == new_column_name)]
-                                    # dataType = dfC_type['Data Type'].iloc[0]
-
-                                    if dataType == "Int64":
-                                        df[new_column_name] = df[
-                                            new_column_name
-                                        ].astype(int)
-                                    elif dataType in ["Decimal", "Double"]:
-                                        df[new_column_name] = df[
-                                            new_column_name
-                                        ].astype(float)
-                                    elif dataType == "Boolean":
-                                        df[new_column_name] = df[
-                                            new_column_name
-                                        ].astype(bool)
-                                    elif dataType == "DateTime":
-                                        df[new_column_name] = pd.to_datetime(
-                                            df[new_column_name]
-                                        )
-                                    else:
-                                        df[new_column_name] = df[
-                                            new_column_name
-                                        ].astype(str)
-                            # else:
-                            #     second_column_name = df.columns[1]
-                            #     third_column_name = df.columns[2]
-                            #     df[third_column_name] = df[third_column_name].astype(int)
-
-                            # Remove calc columns from field parameters
-                            # mask = df[second_column_name].isin(dfC_filt['Full Column Name'])
-                            # df = df[~mask]
-
-                            delta_table_name = tName.replace(" ", "_")
-                            print(
-                                f"{icons.in_progress} Refresh of the '{delta_table_name}' table within the lakehouse is in progress..."
+    @retry(
+        sleep_time=1,
+        timeout_error_message=f"{icons.red_dot} Function timed out after 1 minute",
+    )
+    def dyn_connect():
+        with connect_semantic_model(
+            dataset=dataset, readonly=True, workspace=workspace
+        ) as tom:
+
+            tom.model
+
+    dyn_connect()
+
+    with connect_semantic_model(
+        dataset=dataset, readonly=True, workspace=workspace
+    ) as tom:
+        for a in tom.model.Annotations:
+            if any(a.Name == t.Name for t in tom.model.Tables):
+                tName = a.Name
+                query = a.Value
+
+                if not query.startswith("EVALUATE"):
+                    daxquery = "EVALUATE \n" + query
+                else:
+                    daxquery = query
+
+                try:
+                    df = fabric.evaluate_dax(
+                        dataset=dataset,
+                        dax_string=daxquery,
+                        workspace=workspace,
+                    )
+
+                    # Update column names for non-field parameters
+                    if query.find("NAMEOF") == -1:
+                        for old_column_name in df.columns:
+                            pattern = r"\[([^\]]+)\]"
+
+                            matches = re.findall(pattern, old_column_name)
+                            new_column_name = matches[0]
+                            new_column_name = new_column_name.replace(" ", "")
+
+                            df.rename(
+                                columns={old_column_name: new_column_name},
+                                inplace=True,
                             )

-                            spark_df = spark.createDataFrame(df)
-                            spark_df.write.mode("overwrite").format(
-                                "delta"
-                            ).saveAsTable(delta_table_name)
-                            print(
-                                f"{icons.green_dot} Calculated table '{tName}' has been refreshed as the '{delta_table_name.lower()}' table in the lakehouse."
+                            # Update data types for lakehouse columns
+                            dataType = next(
+                                str(c.DataType)
+                                for c in tom.all_columns()
+                                if c.Parent.Name == tName
+                                and c.SourceColumn == new_column_name
                             )
-                        except Exception as e:
-                            raise ValueError(
-                                f"{icons.red_dot} Failed to create calculated table '{tName}' as a delta table in the lakehouse."
-                            ) from e
-
-        except Exception:
-            if datetime.datetime.now() - start_time > timeout:
-                break
-            time.sleep(1)
+                            # dfC_type = dfC[(dfC['Table Name'] == tName) & (dfC['Source'] == new_column_name)]
+                            # dataType = dfC_type['Data Type'].iloc[0]
+
+                            if dataType == "Int64":
+                                df[new_column_name] = df[new_column_name].astype(int)
+                            elif dataType in ["Decimal", "Double"]:
+                                df[new_column_name] = df[new_column_name].astype(float)
+                            elif dataType == "Boolean":
+                                df[new_column_name] = df[new_column_name].astype(bool)
+                            elif dataType == "DateTime":
+                                df[new_column_name] = pd.to_datetime(
+                                    df[new_column_name]
+                                )
+                            else:
+                                df[new_column_name] = df[new_column_name].astype(str)
+                    # else:
+                    #     second_column_name = df.columns[1]
+                    #     third_column_name = df.columns[2]
+                    #     df[third_column_name] = df[third_column_name].astype(int)
+
+                    # Remove calc columns from field parameters
+                    # mask = df[second_column_name].isin(dfC_filt['Full Column Name'])
+                    # df = df[~mask]
+
+                    delta_table_name = tName.replace(" ", "_")
+                    print(
+                        f"{icons.in_progress} Refresh of the '{delta_table_name}' table within the lakehouse is in progress..."
+                    )
+
+                    spark_df = spark.createDataFrame(df)
+                    spark_df.write.mode("overwrite").format("delta").saveAsTable(
+                        delta_table_name
+                    )
+                    print(
+                        f"{icons.green_dot} Calculated table '{tName}' has been refreshed as the '{delta_table_name.lower()}' table in the lakehouse."
+                    )
+                except Exception as e:
+                    raise ValueError(
+                        f"{icons.red_dot} Failed to create calculated table '{tName}' as a delta table in the lakehouse."
+                    ) from e