semantic-link-labs 0.7.2__py3-none-any.whl → 0.7.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/METADATA +15 -3
- semantic_link_labs-0.7.4.dist-info/RECORD +134 -0
- {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +120 -24
- sempy_labs/_bpa_translation/{_translations_am-ET.po → _model/_translations_am-ET.po} +22 -0
- sempy_labs/_bpa_translation/{_translations_ar-AE.po → _model/_translations_ar-AE.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +938 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +934 -0
- sempy_labs/_bpa_translation/{_translations_cs-CZ.po → _model/_translations_cs-CZ.po} +179 -157
- sempy_labs/_bpa_translation/{_translations_da-DK.po → _model/_translations_da-DK.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_de-DE.po → _model/_translations_de-DE.po} +77 -52
- sempy_labs/_bpa_translation/{_translations_el-GR.po → _model/_translations_el-GR.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_es-ES.po → _model/_translations_es-ES.po} +67 -43
- sempy_labs/_bpa_translation/{_translations_fa-IR.po → _model/_translations_fa-IR.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +915 -0
- sempy_labs/_bpa_translation/{_translations_fr-FR.po → _model/_translations_fr-FR.po} +83 -57
- sempy_labs/_bpa_translation/{_translations_ga-IE.po → _model/_translations_ga-IE.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_he-IL.po → _model/_translations_he-IL.po} +23 -0
- sempy_labs/_bpa_translation/{_translations_hi-IN.po → _model/_translations_hi-IN.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_hu-HU.po → _model/_translations_hu-HU.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +918 -0
- sempy_labs/_bpa_translation/{_translations_is-IS.po → _model/_translations_is-IS.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_it-IT.po → _model/_translations_it-IT.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_ja-JP.po → _model/_translations_ja-JP.po} +21 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +823 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +937 -0
- sempy_labs/_bpa_translation/{_translations_nl-NL.po → _model/_translations_nl-NL.po} +80 -56
- sempy_labs/_bpa_translation/{_translations_pl-PL.po → _model/_translations_pl-PL.po} +101 -76
- sempy_labs/_bpa_translation/{_translations_pt-BR.po → _model/_translations_pt-BR.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_pt-PT.po → _model/_translations_pt-PT.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +939 -0
- sempy_labs/_bpa_translation/{_translations_ru-RU.po → _model/_translations_ru-RU.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +925 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +922 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +914 -0
- sempy_labs/_bpa_translation/{_translations_ta-IN.po → _model/_translations_ta-IN.po} +26 -0
- sempy_labs/_bpa_translation/{_translations_te-IN.po → _model/_translations_te-IN.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_th-TH.po → _model/_translations_th-TH.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +925 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +933 -0
- sempy_labs/_bpa_translation/{_translations_zh-CN.po → _model/_translations_zh-CN.po} +116 -97
- sempy_labs/_bpa_translation/{_translations_zu-ZA.po → _model/_translations_zu-ZA.po} +25 -0
- sempy_labs/_capacities.py +541 -0
- sempy_labs/_clear_cache.py +298 -3
- sempy_labs/_connections.py +138 -0
- sempy_labs/_dataflows.py +130 -0
- sempy_labs/_deployment_pipelines.py +171 -0
- sempy_labs/_environments.py +156 -0
- sempy_labs/_generate_semantic_model.py +148 -27
- sempy_labs/_git.py +380 -0
- sempy_labs/_helper_functions.py +203 -8
- sempy_labs/_icons.py +43 -0
- sempy_labs/_list_functions.py +170 -1012
- sempy_labs/_model_bpa.py +90 -112
- sempy_labs/_model_bpa_bulk.py +3 -1
- sempy_labs/_model_bpa_rules.py +788 -800
- sempy_labs/_notebooks.py +143 -0
- sempy_labs/_query_scale_out.py +28 -7
- sempy_labs/_spark.py +465 -0
- sempy_labs/_sql.py +120 -0
- sempy_labs/_translations.py +3 -1
- sempy_labs/_vertipaq.py +160 -99
- sempy_labs/_workspace_identity.py +66 -0
- sempy_labs/_workspaces.py +294 -0
- sempy_labs/directlake/__init__.py +2 -0
- sempy_labs/directlake/_directlake_schema_compare.py +1 -2
- sempy_labs/directlake/_directlake_schema_sync.py +1 -2
- sempy_labs/directlake/_dl_helper.py +4 -7
- sempy_labs/directlake/_generate_shared_expression.py +85 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +7 -3
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +5 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +6 -2
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -5
- sempy_labs/migration/_migration_validation.py +6 -0
- sempy_labs/report/_report_functions.py +21 -42
- sempy_labs/report/_report_rebind.py +5 -0
- sempy_labs/tom/_model.py +95 -52
- semantic_link_labs-0.7.2.dist-info/RECORD +0 -111
- {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/top_level.txt +0 -0
sempy_labs/_model_bpa.py
CHANGED
@@ -12,6 +12,7 @@ from sempy_labs._helper_functions import (
     save_as_delta_table,
     resolve_workspace_capacity,
     resolve_dataset_id,
+    get_language_codes,
 )
 from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
 from sempy_labs.tom import connect_semantic_model
@@ -21,7 +22,6 @@ from sempy._utils._log import log
 import sempy_labs._icons as icons
 from pyspark.sql.functions import col, flatten
 from pyspark.sql.types import StructType, StructField, StringType
-import polib
 import os
 
 
@@ -65,7 +65,7 @@ def run_model_bpa(
         A pandas dataframe in HTML format showing semantic model objects which violated the best practice analyzer rules.
     """
 
-
+    import polib
 
     if "extend" in kwargs:
         print(
@@ -81,34 +81,9 @@ def run_model_bpa(
         "ignore", category=UserWarning, message=".*Arrow optimization.*"
     )
 
-    language_list = [
-
-
-        "he-IL",
-        "pt-PT",
-        "zh-CN",
-        "fr-FR",
-        "da-DK",
-        "cs-CZ",
-        "de-DE",
-        "el-GR",
-        "fa-IR",
-        "ga-IE",
-        "hi-IN",
-        "hu-HU",
-        "is-IS",
-        "ja-JP",
-        "nl-NL",
-        "pl-PL",
-        "pt-BR",
-        "ru-RU",
-        "te-IN",
-        "ta-IN",
-        "th-TH",
-        "zu-ZA",
-        "am-ET",
-        "ar-AE",
-    ]
+    language_list = list(icons.language_map.keys())
+    if language is not None:
+        language = get_language_codes(languages=language)[0]
 
     # Map languages to the closest language (first 2 letters matching)
     def map_language(language, language_list):
@@ -152,7 +127,7 @@ def run_model_bpa(
     def translate_using_po(rule_file):
         current_dir = os.path.dirname(os.path.abspath(__file__))
         translation_file = (
-            f"{current_dir}/_bpa_translation/_translations_{language}.po"
+            f"{current_dir}/_bpa_translation/_model/_translations_{language}.po"
         )
         for c in ["Category", "Description", "Rule Name"]:
             po = polib.pofile(translation_file)
@@ -162,86 +137,86 @@ def run_model_bpa(
                         entry.msgstr
                     )
 
-
-
-
-
-
-
-
-
-
-
-
-        )
+    translated = False
+
+    # Translations
+    if language is not None and rules is None and language in language_list:
+        rules = model_bpa_rules(dependencies=dep)
+        translate_using_po(rules)
+        translated = True
+    if rules is None:
+        rules = model_bpa_rules(dependencies=dep)
+    if language is not None and not translated:
+
+        def translate_using_spark(rule_file):
+
+            from synapse.ml.services import Translate
+
+            rules_temp = rule_file.copy()
+            rules_temp = rules_temp.drop(["Expression", "URL", "Severity"], axis=1)
 
-
-
-
-
-
-
-
-                .setTextCol(clm)
-                .setToLanguage(language)
-                .setOutputCol("translation")
-                .setConcurrency(5)
+            schema = StructType(
+                [
+                    StructField("Category", StringType(), True),
+                    StructField("Scope", StringType(), True),
+                    StructField("Rule Name", StringType(), True),
+                    StructField("Description", StringType(), True),
+                ]
             )
 
-
-
-
-
-
-
-
-
-
-
-
-                translate.transform(dfRules)
-                .withColumn(
-                    "translation", flatten(col("translation.translations"))
-                )
-                .withColumn("translation", col("translation.text"))
-                .select("Rule Name", clm, "translation")
+            spark = SparkSession.builder.getOrCreate()
+            dfRules = spark.createDataFrame(rules_temp, schema)
+
+            columns = ["Category", "Rule Name", "Description"]
+            for clm in columns:
+                translate = (
+                    Translate()
+                    .setTextCol(clm)
+                    .setToLanguage(language)
+                    .setOutputCol("translation")
+                    .setConcurrency(5)
                 )
 
-
-
-
-
-
-
-
+                if clm == "Rule Name":
+                    transDF = (
+                        translate.transform(dfRules)
+                        .withColumn(
+                            "translation", flatten(col("translation.translations"))
+                        )
+                        .withColumn("translation", col("translation.text"))
+                        .select(clm, "translation")
+                    )
+                else:
+                    transDF = (
+                        translate.transform(dfRules)
+                        .withColumn(
+                            "translation", flatten(col("translation.translations"))
+                        )
+                        .withColumn("translation", col("translation.text"))
+                        .select("Rule Name", clm, "translation")
+                    )
 
-
-
-
-
-
-
+                df_panda = transDF.toPandas()
+                rule_file = pd.merge(
+                    rule_file,
+                    df_panda[["Rule Name", "translation"]],
+                    on="Rule Name",
+                    how="left",
+                )
 
-
-
-
+                rule_file = rule_file.rename(
+                    columns={"translation": f"{clm}Translated"}
+                )
+                rule_file[f"{clm}Translated"] = rule_file[f"{clm}Translated"].apply(
+                    lambda x: x[0] if x is not None else None
+                )
 
-
+            for clm in columns:
+                rule_file = rule_file.drop([clm], axis=1)
+                rule_file = rule_file.rename(columns={f"{clm}Translated": clm})
 
-
+            return rule_file
 
-    # Translations
-    if language is not None and rules is None and language in language_list:
-        rules = model_bpa_rules(
-            dataset=dataset, workspace=workspace, dependencies=dep
-        )
-        translate_using_po(rules)
-        translated = True
-    if rules is None:
-        rules = model_bpa_rules(
-            dataset=dataset, workspace=workspace, dependencies=dep
-        )
-    if language is not None and not translated:
         rules = translate_using_spark(rules)
 
     rules["Severity"].replace("Warning", icons.warning, inplace=True)
@@ -302,26 +277,28 @@ def run_model_bpa(
 
         if scope == "Model":
             x = []
-            if expr(func):
+            if expr(func, tom):
                 x = ["Model"]
         elif scope == "Measure":
-            x = [nm(obj) for obj in tom.all_measures() if expr(obj)]
+            x = [nm(obj) for obj in tom.all_measures() if expr(obj, tom)]
         elif scope == "Column":
-            x = [nm(obj) for obj in tom.all_columns() if expr(obj)]
+            x = [nm(obj) for obj in tom.all_columns() if expr(obj, tom)]
         elif scope == "Partition":
-            x = [nm(obj) for obj in tom.all_partitions() if expr(obj)]
+            x = [nm(obj) for obj in tom.all_partitions() if expr(obj, tom)]
         elif scope == "Hierarchy":
-            x = [nm(obj) for obj in tom.all_hierarchies() if expr(obj)]
+            x = [nm(obj) for obj in tom.all_hierarchies() if expr(obj, tom)]
         elif scope == "Table":
-            x = [nm(obj) for obj in tom.model.Tables if expr(obj)]
+            x = [nm(obj) for obj in tom.model.Tables if expr(obj, tom)]
         elif scope == "Relationship":
-            x = [nm(obj) for obj in tom.model.Relationships if expr(obj)]
+            x = [nm(obj) for obj in tom.model.Relationships if expr(obj, tom)]
         elif scope == "Role":
-            x = [nm(obj) for obj in tom.model.Roles if expr(obj)]
+            x = [nm(obj) for obj in tom.model.Roles if expr(obj, tom)]
         elif scope == "Row Level Security":
-            x = [nm(obj) for obj in tom.all_rls() if expr(obj)]
+            x = [nm(obj) for obj in tom.all_rls() if expr(obj, tom)]
         elif scope == "Calculation Item":
-            x = [
+            x = [
+                nm(obj) for obj in tom.all_calculation_items() if expr(obj, tom)
+            ]
 
         if len(x) > 0:
             new_data = {"Object Name": x, "Scope": scope, "Rule Name": ruleName}
@@ -528,7 +505,8 @@ def run_model_bpa(
             content_html += f'<td>{row["Rule Name"]}</td>'
             content_html += f'<td>{row["Object Type"]}</td>'
             content_html += f'<td>{row["Object Name"]}</td>'
-            content_html += f'<td>{row["Severity"]}</td>'
+            content_html += f'<td style="text-align: center;">{row["Severity"]}</td>'
+            #content_html += f'<td>{row["Severity"]}</td>'
             content_html += "</tr>"
         content_html += "</table>"
 
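Note on the translation changes above: the hard-coded language_list is gone (it is now derived from icons.language_map), the language argument is normalized through get_language_codes, and the bundled .po files moved under _bpa_translation/_model/. A minimal usage sketch follows; the dataset and workspace names are hypothetical, and run_model_bpa is assumed to be exported at the package level as in earlier releases.

import sempy_labs as labs

# Locales with a bundled .po file under _bpa_translation/_model/ are translated
# via polib; other locales fall back to the Spark-based Translate path shown in
# the hunk above.
labs.run_model_bpa(
    dataset="AdventureWorks",     # hypothetical semantic model name
    workspace="Sales Analytics",  # hypothetical workspace name
    language="de-DE",
)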
sempy_labs/_model_bpa_bulk.py
CHANGED
@@ -229,7 +229,9 @@ def create_model_bpa_semantic_model(
     expr = get_shared_expression(lakehouse=lakehouse, workspace=lakehouse_workspace)
 
     # Create blank model
-    create_blank_semantic_model(
+    create_blank_semantic_model(
+        dataset=dataset, workspace=lakehouse_workspace, overwrite=True
+    )
 
     @retry(
         sleep_time=1,