semantic-link-labs 0.4.1 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic.

Files changed (52)
  1. semantic_link_labs-0.4.1.dist-info/LICENSE +21 -0
  2. semantic_link_labs-0.4.1.dist-info/METADATA +22 -0
  3. semantic_link_labs-0.4.1.dist-info/RECORD +52 -0
  4. semantic_link_labs-0.4.1.dist-info/WHEEL +5 -0
  5. semantic_link_labs-0.4.1.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +154 -0
  7. sempy_labs/_ai.py +496 -0
  8. sempy_labs/_clear_cache.py +39 -0
  9. sempy_labs/_connections.py +234 -0
  10. sempy_labs/_dax.py +70 -0
  11. sempy_labs/_generate_semantic_model.py +280 -0
  12. sempy_labs/_helper_functions.py +506 -0
  13. sempy_labs/_icons.py +4 -0
  14. sempy_labs/_list_functions.py +1372 -0
  15. sempy_labs/_model_auto_build.py +143 -0
  16. sempy_labs/_model_bpa.py +1354 -0
  17. sempy_labs/_model_dependencies.py +341 -0
  18. sempy_labs/_one_lake_integration.py +155 -0
  19. sempy_labs/_query_scale_out.py +447 -0
  20. sempy_labs/_refresh_semantic_model.py +184 -0
  21. sempy_labs/_tom.py +3766 -0
  22. sempy_labs/_translations.py +378 -0
  23. sempy_labs/_vertipaq.py +893 -0
  24. sempy_labs/directlake/__init__.py +45 -0
  25. sempy_labs/directlake/_directlake_schema_compare.py +110 -0
  26. sempy_labs/directlake/_directlake_schema_sync.py +128 -0
  27. sempy_labs/directlake/_fallback.py +62 -0
  28. sempy_labs/directlake/_get_directlake_lakehouse.py +69 -0
  29. sempy_labs/directlake/_get_shared_expression.py +59 -0
  30. sempy_labs/directlake/_guardrails.py +84 -0
  31. sempy_labs/directlake/_list_directlake_model_calc_tables.py +54 -0
  32. sempy_labs/directlake/_show_unsupported_directlake_objects.py +89 -0
  33. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +81 -0
  34. sempy_labs/directlake/_update_directlake_partition_entity.py +64 -0
  35. sempy_labs/directlake/_warm_cache.py +210 -0
  36. sempy_labs/lakehouse/__init__.py +24 -0
  37. sempy_labs/lakehouse/_get_lakehouse_columns.py +81 -0
  38. sempy_labs/lakehouse/_get_lakehouse_tables.py +250 -0
  39. sempy_labs/lakehouse/_lakehouse.py +85 -0
  40. sempy_labs/lakehouse/_shortcuts.py +296 -0
  41. sempy_labs/migration/__init__.py +29 -0
  42. sempy_labs/migration/_create_pqt_file.py +239 -0
  43. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +429 -0
  44. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +150 -0
  45. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +524 -0
  46. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +165 -0
  47. sempy_labs/migration/_migration_validation.py +227 -0
  48. sempy_labs/migration/_refresh_calc_tables.py +129 -0
  49. sempy_labs/report/__init__.py +35 -0
  50. sempy_labs/report/_generate_report.py +253 -0
  51. sempy_labs/report/_report_functions.py +855 -0
  52. sempy_labs/report/_report_rebind.py +131 -0
sempy_labs/_model_auto_build.py
@@ -0,0 +1,143 @@
+ import sempy
+ import sempy.fabric as fabric
+ import pandas as pd
+ from sempy_labs._tom import connect_semantic_model
+ from sempy_labs._generate_semantic_model import create_blank_semantic_model
+ from sempy_labs.directlake._get_shared_expression import get_shared_expression
+ from typing import List, Optional, Union
+ from sempy._utils._log import log
+
+
+ @log
+ def model_auto_build(
+     dataset: str,
+     file_path: str,
+     workspace: Optional[str] = None,
+     lakehouse: Optional[str] = None,
+     lakehouse_workspace: Optional[str] = None,
+ ):
+     """
+     Dynamically generates a semantic model based on an Excel file template.
+
+     Parameters
+     ----------
+     dataset : str
+         Name of the semantic model.
+     file_path : str
+         The file path of the Excel file template.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None, which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+     lakehouse : str, default=None
+         The Fabric lakehouse used by the Direct Lake semantic model.
+         Defaults to None, which resolves to the lakehouse attached to the notebook.
+     lakehouse_workspace : str, default=None
+         The Fabric workspace used by the lakehouse.
+         Defaults to None, which resolves to the workspace of the attached lakehouse
+         or, if no lakehouse is attached, to the workspace of the notebook.
+
+     Returns
+     -------
+
+     """
+
+     if workspace is None:
+         workspace_id = fabric.get_workspace_id()
+         workspace = fabric.resolve_workspace_name(workspace_id)
+
+     if lakehouse_workspace is None:
+         lakehouse_workspace = workspace
+
+     sheets = [
+         "Model",
+         "Tables",
+         "Measures",
+         "Columns",
+         "Roles",
+         "Hierarchies",
+         "Relationships",
+     ]
+
+     create_blank_semantic_model(dataset=dataset, workspace=workspace)
+
+     with connect_semantic_model(dataset=dataset, workspace=workspace) as tom:
+
+         # DL Only
+         expr = get_shared_expression(lakehouse=lakehouse, workspace=lakehouse_workspace)
+         tom.add_expression(name="DatabaseQuery", expression=expr)
+
+         for sheet in sheets:
+             df = pd.read_excel(file_path, sheet_name=sheet)
+
+             if sheet == "Tables":
+                 for i, r in df.iterrows():
+                     tName = r["Table Name"]
+                     desc = r["Description"]
+                     dc = r["Data Category"]
+                     mode = r["Mode"]
+                     hidden = bool(r["Hidden"])
+
+                     tom.add_table(
+                         name=tName, description=desc, data_category=dc, hidden=hidden
+                     )
+                     if mode == "DirectLake":
+                         tom.add_entity_partition(table_name=tName, entity_name=tName)
+             elif sheet == "Columns":
+                 for i, r in df.iterrows():
+                     tName = r["Table Name"]
+                     cName = r["Column Name"]
+                     scName = r["Source Column"]
+                     dataType = r["Data Type"]
+                     hidden = bool(r["Hidden"])
+                     key = bool(r["Key"])
+                     if dataType == "Integer":
+                         dataType = "Int64"
+                     desc = r["Description"]
+
+                     tom.add_data_column(
+                         table_name=tName,
+                         column_name=cName,
+                         source_column=scName,
+                         data_type=dataType,
+                         description=desc,
+                         hidden=hidden,
+                         key=key,
+                     )
+             elif sheet == "Measures":
+                 for i, r in df.iterrows():
+                     tName = r["Table Name"]
+                     mName = r["Measure Name"]
+                     expr = r["Expression"]
+                     desc = r["Description"]
+                     format = r["Format String"]
+                     hidden = bool(r["Hidden"])
+
+                     tom.add_measure(
+                         table_name=tName,
+                         measure_name=mName,
+                         expression=expr,
+                         format_string=format,
+                         description=desc,
+                         hidden=hidden,
+                     )
+             elif sheet == "Relationships":
+                 for i, r in df.iterrows():
+                     fromTable = r["From Table"]
+                     fromColumn = r["From Column"]
+                     toTable = r["To Table"]
+                     toColumn = r["To Column"]
+                     fromCard = r["From Cardinality"]
+                     toCard = r["To Cardinality"]
+
+                     tom.add_relationship(
+                         from_table=fromTable,
+                         from_column=fromColumn,
+                         to_table=toTable,
+                         to_column=toColumn,
+                         from_cardinality=fromCard,
+                         to_cardinality=toCard,
+                     )
+             elif sheet == "Roles":
+                 # Placeholder: building roles from the "Roles" sheet is not implemented yet
+                 print("hi")
+             elif sheet == "Hierarchies":
+                 # Placeholder: building hierarchies from the "Hierarchies" sheet is not implemented yet
+                 print("hi")