semantic-link-labs 0.4.2-py3-none-any.whl → 0.6.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic.

Files changed (54)
  1. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA +2 -2
  2. semantic_link_labs-0.6.0.dist-info/RECORD +54 -0
  3. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +44 -14
  5. sempy_labs/_ai.py +31 -32
  6. sempy_labs/_clear_cache.py +5 -8
  7. sempy_labs/_connections.py +80 -72
  8. sempy_labs/_dax.py +7 -9
  9. sempy_labs/_generate_semantic_model.py +60 -54
  10. sempy_labs/_helper_functions.py +8 -10
  11. sempy_labs/_icons.py +15 -0
  12. sempy_labs/_list_functions.py +1139 -428
  13. sempy_labs/_model_auto_build.py +5 -6
  14. sempy_labs/_model_bpa.py +134 -1125
  15. sempy_labs/_model_bpa_rules.py +831 -0
  16. sempy_labs/_model_dependencies.py +21 -25
  17. sempy_labs/_one_lake_integration.py +10 -7
  18. sempy_labs/_query_scale_out.py +83 -93
  19. sempy_labs/_refresh_semantic_model.py +12 -16
  20. sempy_labs/_translations.py +214 -288
  21. sempy_labs/_vertipaq.py +51 -42
  22. sempy_labs/directlake/__init__.py +2 -0
  23. sempy_labs/directlake/_directlake_schema_compare.py +12 -11
  24. sempy_labs/directlake/_directlake_schema_sync.py +13 -23
  25. sempy_labs/directlake/_fallback.py +5 -7
  26. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  27. sempy_labs/directlake/_get_shared_expression.py +4 -8
  28. sempy_labs/directlake/_guardrails.py +6 -8
  29. sempy_labs/directlake/_list_directlake_model_calc_tables.py +18 -12
  30. sempy_labs/directlake/_show_unsupported_directlake_objects.py +4 -4
  31. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +9 -8
  32. sempy_labs/directlake/_update_directlake_partition_entity.py +129 -12
  33. sempy_labs/directlake/_warm_cache.py +5 -5
  34. sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
  35. sempy_labs/lakehouse/_get_lakehouse_tables.py +4 -4
  36. sempy_labs/lakehouse/_lakehouse.py +3 -4
  37. sempy_labs/lakehouse/_shortcuts.py +17 -13
  38. sempy_labs/migration/__init__.py +1 -1
  39. sempy_labs/migration/_create_pqt_file.py +21 -24
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
  41. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
  42. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +45 -46
  43. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
  44. sempy_labs/migration/_migration_validation.py +6 -2
  45. sempy_labs/migration/_refresh_calc_tables.py +10 -5
  46. sempy_labs/report/__init__.py +2 -2
  47. sempy_labs/report/_generate_report.py +8 -7
  48. sempy_labs/report/_report_functions.py +47 -52
  49. sempy_labs/report/_report_rebind.py +38 -37
  50. sempy_labs/tom/__init__.py +1 -4
  51. sempy_labs/tom/_model.py +541 -180
  52. semantic_link_labs-0.4.2.dist-info/RECORD +0 -53
  53. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/LICENSE +0 -0
  54. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/top_level.txt +0 -0

sempy_labs/migration/_migrate_model_objects_to_semantic_model.py
@@ -1,6 +1,8 @@
  import sempy
  import sempy.fabric as fabric
- import re, datetime, time
+ import re
+ import datetime
+ import time
  from sempy_labs._list_functions import list_tables
  from sempy_labs._helper_functions import create_relationship_name
  from sempy_labs.tom import connect_semantic_model
@@ -8,6 +10,7 @@ from typing import Optional
  from sempy._utils._log import log
  import sempy_labs._icons as icons

+
  @log
  def migrate_model_objects_to_semantic_model(
      dataset: str,
@@ -38,11 +41,7 @@ def migrate_model_objects_to_semantic_model(
      import Microsoft.AnalysisServices.Tabular as TOM
      import System

-     if workspace is None:
-         workspace_id = fabric.get_workspace_id()
-         workspace = fabric.resolve_workspace_name(workspace_id)
-     else:
-         workspace_id = fabric.resolve_workspace_id(workspace)
+     workspace = fabric.resolve_workspace_name(workspace)

      if new_dataset_workspace is None:
          new_dataset_workspace = workspace
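
Note: the hunk above collapses the old two-branch workspace lookup into a single call. A minimal sketch of the equivalence, assuming fabric.resolve_workspace_name accepts a workspace name, a workspace ID, or None (with None resolving to the current notebook's workspace, which is what lets one call cover both branches); the helper names below are hypothetical:

    import sempy.fabric as fabric

    # Old shape: branch on whether a workspace was supplied.
    def resolve_workspace_old(workspace=None):
        if workspace is None:
            workspace_id = fabric.get_workspace_id()  # workspace of the running notebook
            workspace = fabric.resolve_workspace_name(workspace_id)
        return workspace

    # New shape: one call covers both the None and the explicit-name cases.
    def resolve_workspace_new(workspace=None):
        return fabric.resolve_workspace_name(workspace)
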
@@ -63,7 +62,6 @@ def migrate_model_objects_to_semantic_model(

      dfP_cc = dfPar[(dfPar["Source Type"] == "Calculated")]
      dfP_fp = dfP_cc[dfP_cc["Query"].str.contains("NAMEOF")]
-     dfC_fp = dfC[dfC["Table Name"].isin(dfP_fp["Table Name"].values)]

      print(f"{icons.in_progress} Updating '{new_dataset}' based on '{dataset}'...")
      start_time = datetime.datetime.now()
@@ -128,12 +126,15 @@ def migrate_model_objects_to_semantic_model(
                          c.IsKey = bool(dfC_filt["Key"].iloc[0])
                          sbc = dfC_filt["Sort By Column"].iloc[0]

-                         if sbc != None:
-                             try:
+                         if sbc is not None:
+                             if any(
+                                 o.Name == sbc and o.Parent.Name == c.Parent.Name
+                                 for o in tom.all_columns()
+                             ):
                                  c.SortByColumn = tom.model.Tables[
                                      t.Name
                                  ].Columns[sbc]
-                             except:
+                             else:
                                  print(
                                      f"{icons.red_dot} Failed to create '{sbc}' as a Sort By Column for the '{c.Name}' in the '{t.Name}' table."
                                  )
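
Note: this release consistently swaps try/except existence probes on TOM collections (index, then catch the failure) for explicit any() membership tests, as in the hunk above. A standalone sketch of the pattern with stand-in objects; the Column class and sample names are hypothetical, while tom.all_columns() in the real code yields TOM columns exposing .Name and .Parent:

    # Stand-in for a TOM column: only .Name and .Parent.Name matter here.
    class Column:
        def __init__(self, name, parent_name):
            self.Name = name
            self.Parent = type("Table", (), {"Name": parent_name})()

    columns = [Column("Country", "Geo"), Column("Amount", "FactSales")]

    def column_exists(columns, column_name, table_name):
        # Explicit membership test: no control flow via exceptions, and no
        # bare `except:` that silently swallows unrelated errors.
        return any(
            c.Name == column_name and c.Parent.Name == table_name for c in columns
        )

    print(column_exists(columns, "Country", "Geo"))   # True
    print(column_exists(columns, "Country", "Fact"))  # False
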
@@ -163,9 +164,11 @@ def migrate_model_objects_to_semantic_model(
                      cols = r["Column Name"]
                      lvls = r["Level Name"]

-                     try:
-                         tom.model.Tables[tName].Hierarchies[hName]
-                     except:
+                     if not any(
+                         t.Name == tName and h.Name == hName
+                         for t in tom.model.Tables
+                         for h in t.Hierarchies
+                     ):
                          tom.add_hierarchy(
                              table_name=tName,
                              hierarchy_name=hName,
@@ -188,9 +191,7 @@ def migrate_model_objects_to_semantic_model(
                      mDesc = r["Measure Description"]
                      mFS = r["Format String"]

-                     try:
-                         tom.model.Tables[tName].Measures[mName]
-                     except:
+                     if not any(m.Name == mName for m in tom.all_measures()):
                          tom.add_measure(
                              table_name=tName,
                              measure_name=mName,
@@ -203,7 +204,7 @@ def migrate_model_objects_to_semantic_model(
                          print(
                              f"{icons.green_dot} The '{mName}' measure has been added."
                          )
-
+                 print(f"\n{icons.in_progress} Creating calculation groups...")
                  for cgName in dfCI["Calculation Group Name"].unique():

                      isHidden = bool(
@@ -220,9 +221,7 @@ def migrate_model_objects_to_semantic_model(
                          (dfCI["Calculation Group Name"] == cgName), "Description"
                      ].iloc[0]

-                     try:
-                         tom.model.Tables[cgName]
-                     except:
+                     if not any(t.Name == cgName for t in tom.model.Tables):
                          tom.add_calculation_group(
                              name=cgName,
                              description=desc,
@@ -234,11 +233,11 @@ def migrate_model_objects_to_semantic_model(
                          )
                          tom.model.DiscourageImplicitMeasures = True

-                         print(
-                             f"\n{icons.in_progress} Updating calculation group column name..."
-                         )
+                         # print(
+                         #     f"\n{icons.in_progress} Updating calculation group column names..."
+                         # )
                          dfC_filt = dfC[
-                             (dfC["Table Name"] == cgName) & (dfC["Hidden"] is False)
+                             (dfC["Table Name"] == cgName) & (dfC["Hidden"] == False)
                          ]
                          colName = dfC_filt["Column Name"].iloc[0]
                          tom.model.Tables[cgName].Columns["Name"].Name = colName
@@ -267,11 +266,12 @@ def migrate_model_objects_to_semantic_model(
                              & (dfCI["Calculation Item Name"] == calcItem),
                              "Format String Expression",
                          ].iloc[0]
-                         try:
-                             tom.model.Tables[cgName].CalculationGroup.CalculationItems[
-                                 calcItem
-                             ]
-                         except:
+
+                         if not any(
+                             ci.CalculationGroup.Parent.Name == cgName
+                             and ci.Name == calcItem
+                             for ci in tom.all_calculation_items()
+                         ):
                              tom.add_calculation_item(
                                  table_name=cgName,
                                  calculation_item_name=calcItem,
@@ -340,7 +340,13 @@ def migrate_model_objects_to_semantic_model(
                          f"{icons.yellow_dot} {relName} was not created since columns used in a relationship must have the same data type."
                      )
                  else:
-                     try:
+                     if not any(
+                         r.FromTable.Name == fromTable
+                         and r.FromColumn.Name == fromColumn
+                         and r.ToTable.Name == toTable
+                         and r.ToColumn.Name == toColumn
+                         for r in tom.model.Relationships
+                     ):
                          tom.add_relationship(
                              from_table=fromTable,
                              from_column=fromColumn,
@@ -353,11 +359,10 @@ def migrate_model_objects_to_semantic_model(
                              rely_on_referential_integrity=rori,
                              is_active=isActive,
                          )
-
                          print(
                              f"{icons.green_dot} The {relName} relationship has been added."
                          )
-                     except:
+                     else:
                          print(
                              f"{icons.red_dot} The {relName} relationship was not added."
                          )
@@ -368,9 +373,7 @@ def migrate_model_objects_to_semantic_model(
                  roleDesc = row["Description"]
                  modPerm = row["Model Permission"]

-                 try:
-                     tom.model.Roles[roleName]
-                 except:
+                 if not any(r.Name == roleName for r in tom.model.Roles):
                      tom.add_role(
                          role_name=roleName,
                          model_permission=modPerm,
@@ -386,14 +389,14 @@ def migrate_model_objects_to_semantic_model(
                  tName = row["Table"]
                  expr = row["Filter Expression"]

-                 try:
+                 if any(t.Name == tName for t in tom.model.Tables):
                      tom.set_rls(
                          role_name=roleName, table_name=tName, filter_expression=expr
                      )
                      print(
                          f"{icons.green_dot} Row level security for the '{tName}' table within the '{roleName}' role has been set."
                      )
-                 except:
+                 else:
                      print(
                          f"{icons.red_dot} Row level security for the '{tName}' table within the '{roleName}' role was not set."
                      )
@@ -401,9 +404,7 @@ def migrate_model_objects_to_semantic_model(
                  print(f"\n{icons.in_progress} Creating perspectives...")
                  for pName in dfP["Perspective Name"].unique():

-                     try:
-                         tom.model.Perspectives[pName]
-                     except:
+                     if not any(p.Name == pName for p in tom.model.Perspectives):
                          tom.add_perspective(perspective_name=pName)
                          print(
                              f"{icons.green_dot} The '{pName}' perspective has been added."
@@ -437,14 +438,12 @@ def migrate_model_objects_to_semantic_model(
                                  object=tom.model.Tables[tName].Hierarchies[oName],
                                  perspective_name=pName,
                              )
-                         except:
+                         except Exception:
                              pass

                  print(f"\n{icons.in_progress} Creating translation languages...")
                  for trName in dfTranslation["Culture Name"].unique():
-                     try:
-                         tom.model.Cultures[trName]
-                     except:
+                     if not any(c.Name == trName for c in tom.model.Cultures):
                          tom.add_translation(trName)
                          print(
                              f"{icons.green_dot} The '{trName}' translation language has been added."
@@ -510,14 +509,14 @@ def migrate_model_objects_to_semantic_model(
                                  property=prop,
                                  value=translation,
                              )
-                         except:
+                         except Exception:
                              pass

                  print(
                      f"\n{icons.green_dot} Migration of objects from '{dataset}' -> '{new_dataset}' is complete."
                  )

-         except Exception as e:
+         except Exception:
              if datetime.datetime.now() - start_time > timeout:
                  break
              time.sleep(1)
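
Note: the migration modules in this release wrap their model updates in the retry loop whose tail appears above: retry once per second until the work succeeds or a one-minute timeout elapses. A minimal sketch of that shape; do_migration is a stand-in for the loop body, and exactly where success flips to True follows the `success = True` visible in the next file's hunk:

    import datetime
    import time

    timeout = datetime.timedelta(minutes=1)
    start_time = datetime.datetime.now()
    success = False

    def do_migration():
        ...  # stand-in for the TOM updates performed inside the loop

    while not success:
        try:
            do_migration()
            success = True
        except Exception:
            # Transient failures are retried until the timeout elapses.
            if datetime.datetime.now() - start_time > timeout:
                break
            time.sleep(1)
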

sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py
@@ -1,13 +1,13 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- import datetime, time
+ import datetime
+ import time
  from sempy_labs._list_functions import list_tables
  from sempy_labs.directlake._get_shared_expression import get_shared_expression
  from sempy_labs._helper_functions import resolve_lakehouse_name
  from sempy_labs.lakehouse._lakehouse import lakehouse_attached
  from sempy_labs.tom import connect_semantic_model
- from typing import List, Optional, Union
+ from typing import Optional
  from sempy._utils._log import log
  import sempy_labs._icons as icons
@@ -95,9 +95,9 @@ def migrate_tables_columns_to_semantic_model(
                  dataset=new_dataset, readonly=False, workspace=new_dataset_workspace
              ) as tom:
                  success = True
-                 try:
-                     tom.model.Expressions["DatabaseQuery"]
-                 except:
+                 if not any(
+                     e.Name == "DatabaseQuery" for e in tom.model.Expressions
+                 ):
                      tom.add_expression("DatabaseQuery", expression=shEx)
                      print(
                          f"{icons.green_dot} The 'DatabaseQuery' expression has been added."
@@ -109,9 +109,7 @@ def migrate_tables_columns_to_semantic_model(
                      tHid = bool(r["Hidden"])
                      tDesc = r["Description"]

-                     try:
-                         tom.model.Tables[tName]
-                     except:
+                     if not any(t.Name == tName for t in tom.model.Tables):
                          tom.add_table(
                              name=tName,
                              description=tDesc,
@@ -132,9 +130,10 @@ def migrate_tables_columns_to_semantic_model(
                      cHid = bool(r["Hidden"])
                      cDataType = r["Data Type"]

-                     try:
-                         tom.model.Tables[tName].Columns[cName]
-                     except:
+                     if not any(
+                         c.Name == cName and c.Parent.Name == tName
+                         for c in tom.all_columns()
+                     ):
                          tom.add_data_column(
                              table_name=tName,
                              column_name=cName,
@@ -149,13 +148,14 @@ def migrate_tables_columns_to_semantic_model(
                  print(
                      f"\n{icons.green_dot} All regular tables and columns have been added to the '{new_dataset}' semantic model."
                  )
-             except Exception as e:
+             except Exception:
                  if datetime.datetime.now() - start_time > timeout:
                      break
                  time.sleep(1)
      else:
          print(
-             f"{icons.red_dot} Lakehouse not attached to notebook and lakehouse/lakehouse_workspace are not specified. Please add your lakehouse to this notebook or specify the lakehouse/lakehouse_workspace parameters."
+             f"{icons.red_dot} Lakehouse not attached to notebook and lakehouse/lakehouse_workspace are not specified. Please add your lakehouse to this notebook"
+             f" or specify the lakehouse/lakehouse_workspace parameters."
          )
          print(
              "To attach a lakehouse to a notebook, go to the the 'Explorer' window to the left, click 'Lakehouses' to add your lakehouse to this notebook"

sempy_labs/migration/_migration_validation.py
@@ -1,10 +1,10 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- from typing import List, Optional, Union
+ from typing import Optional
  from sempy_labs._list_functions import list_semantic_model_objects
  from sempy._utils._log import log

+
  @log
  def migration_validation(
      dataset: str,
@@ -36,6 +36,10 @@ def migration_validation(
          A pandas dataframe showing a list of objects and whether they were successfully migrated. Also shows the % of objects which were migrated successfully.
      """

+     workspace = fabric.resolve_workspace_name(workspace)
+     if new_dataset_workspace is None:
+         new_dataset_workspace = workspace
+
      dfA = list_semantic_model_objects(dataset=dataset, workspace=workspace)
      dfB = list_semantic_model_objects(
          dataset=new_dataset, workspace=new_dataset_workspace

sempy_labs/migration/_refresh_calc_tables.py
@@ -1,10 +1,11 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- import re, datetime, time
+ import re
+ import datetime
+ import time
  from pyspark.sql import SparkSession
  from sempy_labs.tom import connect_semantic_model
- from typing import List, Optional, Union
+ from typing import Optional
  from sempy._utils._log import log
  import sempy_labs._icons as icons
@@ -30,6 +31,8 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
      timeout = datetime.timedelta(minutes=1)
      success = False

+     workspace = fabric.resolve_workspace_name(workspace)
+
      while not success:
          try:
              with connect_semantic_model(
@@ -119,9 +122,11 @@ def refresh_calc_tables(dataset: str, workspace: Optional[str] = None):
                              f"{icons.green_dot} Calculated table '{tName}' has been refreshed as the '{delta_table_name.lower()}' table in the lakehouse."
                          )
                      except Exception as e:
-                         raise ValueError(f"{icons.red_dot} Failed to create calculated table '{tName}' as a delta table in the lakehouse.") from e
+                         raise ValueError(
+                             f"{icons.red_dot} Failed to create calculated table '{tName}' as a delta table in the lakehouse."
+                         ) from e

-         except Exception as e:
+         except Exception:
              if datetime.datetime.now() - start_time > timeout:
                  break
              time.sleep(1)

sempy_labs/report/__init__.py
@@ -1,6 +1,6 @@
  from sempy_labs.report._generate_report import (
      create_report_from_reportjson,
-     #update_report_from_reportjson,
+     # update_report_from_reportjson,
  )
  from sempy_labs.report._report_functions import (
      get_report_json,
@@ -20,7 +20,7 @@ from sempy_labs.report._report_rebind import (

  __all__ = [
      "create_report_from_reportjson",
-     #"update_report_from_reportjson",
+     # "update_report_from_reportjson",
      "get_report_json",
      # report_dependency_tree,
      "export_report",

sempy_labs/report/_generate_report.py
@@ -1,7 +1,8 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- import json, base64, time
+ import json
+ import base64
+ import time
  from typing import Optional
  from sempy_labs._helper_functions import resolve_workspace_name_and_id
  import sempy_labs._icons as icons
@@ -41,10 +42,9 @@ def create_report_from_reportjson(
      dfI_model = dfI_m[(dfI_m["Display Name"] == dataset)]

      if len(dfI_model) == 0:
-         print(
+         raise ValueError(
              f"{icons.red_dot} The '{dataset}' semantic model does not exist in the '{workspace}' workspace."
          )
-         return

      datasetId = dfI_model["Id"].iloc[0]
@@ -172,8 +172,9 @@ def update_report_from_reportjson(
      dfR_filt = dfR[(dfR["Name"] == report) & (dfR["Report Type"] == "PowerBIReport")]

      if len(dfR_filt) == 0:
-         print(f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace.")
-         return
+         raise ValueError(
+             f"{icons.red_dot} The '{report}' report does not exist in the '{workspace}' workspace."
+         )

      reportId = dfR_filt["Id"].iloc[0]
      client = fabric.FabricRestClient()
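
Note: in both report functions the old soft failure (print, then return) becomes a raised ValueError, so callers get an exception instead of a silently half-finished run. A standalone sketch of the pattern; the helper name is hypothetical:

    import pandas as pd

    def require_report_id(dfR: pd.DataFrame, report: str, workspace: str) -> str:
        dfR_filt = dfR[(dfR["Name"] == report) & (dfR["Report Type"] == "PowerBIReport")]
        if len(dfR_filt) == 0:
            # Raising stops downstream code from running against a missing report.
            raise ValueError(
                f"The '{report}' report does not exist in the '{workspace}' workspace."
            )
        return dfR_filt["Id"].iloc[0]
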
@@ -214,7 +215,7 @@

      request_body = {
          "displayName": report,
-         "type": 'Report',
+         "type": "Report",
          "definition": {
              "parts": [
                  {
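
Note: the request body above is truncated at its first definition part. For orientation, Fabric item definitions carry each part as base64-encoded content; a hedged sketch of how such a body is typically assembled (the part path, payload contents, and payloadType field follow the Fabric REST item-definition convention and are assumptions, not taken from this diff):

    import base64
    import json

    def to_inline_b64(obj) -> str:
        # Definition parts carry their content base64-encoded.
        return base64.b64encode(json.dumps(obj).encode("utf-8")).decode("utf-8")

    report_json = {"sections": []}  # placeholder report layout

    request_body = {
        "displayName": "My Report",
        "type": "Report",
        "definition": {
            "parts": [
                {
                    "path": "report.json",
                    "payload": to_inline_b64(report_json),
                    "payloadType": "InlineBase64",
                }
            ]
        },
    }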