semantic-link-labs 0.8.11__py3-none-any.whl → 0.9.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic; see the registry's advisory page for more details.

Files changed (40)
  1. {semantic_link_labs-0.8.11.dist-info → semantic_link_labs-0.9.1.dist-info}/METADATA +9 -6
  2. {semantic_link_labs-0.8.11.dist-info → semantic_link_labs-0.9.1.dist-info}/RECORD +40 -40
  3. {semantic_link_labs-0.8.11.dist-info → semantic_link_labs-0.9.1.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +29 -2
  5. sempy_labs/_authentication.py +78 -4
  6. sempy_labs/_capacities.py +770 -200
  7. sempy_labs/_capacity_migration.py +7 -37
  8. sempy_labs/_clear_cache.py +8 -8
  9. sempy_labs/_deployment_pipelines.py +1 -1
  10. sempy_labs/_gateways.py +2 -0
  11. sempy_labs/_generate_semantic_model.py +8 -0
  12. sempy_labs/_helper_functions.py +119 -79
  13. sempy_labs/_job_scheduler.py +138 -3
  14. sempy_labs/_list_functions.py +40 -31
  15. sempy_labs/_model_bpa.py +207 -204
  16. sempy_labs/_model_bpa_bulk.py +2 -2
  17. sempy_labs/_model_bpa_rules.py +3 -3
  18. sempy_labs/_notebooks.py +2 -0
  19. sempy_labs/_query_scale_out.py +8 -0
  20. sempy_labs/_sql.py +11 -7
  21. sempy_labs/_vertipaq.py +4 -2
  22. sempy_labs/_warehouses.py +6 -6
  23. sempy_labs/admin/_basic_functions.py +156 -103
  24. sempy_labs/admin/_domains.py +7 -2
  25. sempy_labs/admin/_git.py +4 -1
  26. sempy_labs/admin/_items.py +7 -2
  27. sempy_labs/admin/_scanner.py +7 -4
  28. sempy_labs/directlake/_directlake_schema_compare.py +7 -2
  29. sempy_labs/directlake/_directlake_schema_sync.py +6 -0
  30. sempy_labs/directlake/_dl_helper.py +51 -31
  31. sempy_labs/directlake/_get_directlake_lakehouse.py +20 -27
  32. sempy_labs/directlake/_update_directlake_partition_entity.py +5 -0
  33. sempy_labs/lakehouse/_get_lakehouse_columns.py +17 -22
  34. sempy_labs/lakehouse/_get_lakehouse_tables.py +20 -32
  35. sempy_labs/lakehouse/_lakehouse.py +2 -19
  36. sempy_labs/report/_generate_report.py +45 -0
  37. sempy_labs/report/_report_bpa.py +2 -2
  38. sempy_labs/tom/_model.py +97 -16
  39. {semantic_link_labs-0.8.11.dist-info → semantic_link_labs-0.9.1.dist-info}/LICENSE +0 -0
  40. {semantic_link_labs-0.8.11.dist-info → semantic_link_labs-0.9.1.dist-info}/top_level.txt +0 -0
sempy_labs/tom/_model.py CHANGED
@@ -20,6 +20,8 @@ import sempy_labs._icons as icons
20
20
  from sempy.fabric.exceptions import FabricHTTPException
21
21
  import ast
22
22
  from uuid import UUID
23
+ import sempy_labs._authentication as auth
24
+
23
25
 
24
26
  if TYPE_CHECKING:
25
27
  import Microsoft.AnalysisServices.Tabular
@@ -44,19 +46,91 @@ class TOMWrapper:
44
46
 
45
47
  def __init__(self, dataset, workspace, readonly):
46
48
 
47
- (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
48
- (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
49
- self._dataset_id = dataset_id
50
- self._dataset_name = dataset_name
51
- self._workspace_name = workspace_name
52
- self._workspace_id = workspace_id
49
+ self._is_azure_as = False
50
+ prefix = "asazure"
51
+ prefix_full = f"{prefix}://"
52
+ read_write = ":rw"
53
+ self._token_provider = auth.token_provider.get()
54
+
55
+ # Azure AS workspace logic
56
+ if workspace is not None and workspace.startswith(prefix_full):
57
+ # Set read or read/write accordingly
58
+ if readonly is False and not workspace.endswith(read_write):
59
+ workspace += read_write
60
+ elif readonly is True and workspace.endswith(read_write):
61
+ workspace = workspace[: -len(read_write)]
62
+ self._workspace_name = workspace
63
+ self._workspace_id = workspace
64
+ self._dataset_id = dataset
65
+ self._dataset_name = dataset
66
+ self._is_azure_as = True
67
+ if self._token_provider is None:
68
+ raise ValueError(
69
+ f"{icons.red_dot} A token provider must be provided when connecting to an Azure AS workspace."
70
+ )
71
+ else:
72
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
73
+ (dataset_name, dataset_id) = resolve_dataset_name_and_id(
74
+ dataset, workspace_id
75
+ )
76
+ self._dataset_id = dataset_id
77
+ self._dataset_name = dataset_name
78
+ self._workspace_name = workspace_name
79
+ self._workspace_id = workspace_id
53
80
  self._readonly = readonly
54
81
  self._tables_added = []
55
82
 
56
- self._tom_server = fabric.create_tom_server(
57
- readonly=readonly, workspace=workspace_id
58
- )
59
- self.model = self._tom_server.Databases[dataset_id].Model
83
+ # No token provider (standard authentication)
84
+ if self._token_provider is None:
85
+ self._tom_server = fabric.create_tom_server(
86
+ readonly=readonly, workspace=workspace_id
87
+ )
88
+ # Service Principal Authentication for Azure AS via token provider
89
+ elif self._is_azure_as:
90
+ import Microsoft.AnalysisServices.Tabular as TOM
91
+
92
+ # Extract region from the workspace
93
+ match = re.search(rf"{prefix_full}(.*?).{prefix}", self._workspace_name)
94
+ if match:
95
+ region = match.group(1)
96
+ if self._token_provider is None:
97
+ raise ValueError(
98
+ f"{icons.red_dot} A token provider must be provided when connecting to Azure Analysis Services."
99
+ )
100
+ token = self._token_provider(audience="asazure", region=region)
101
+ connection_str = f'Provider=MSOLAP;Data Source={self._workspace_name};Password="{token}";Persist Security Info=True;Impersonation Level=Impersonate'
102
+ self._tom_server = TOM.Server()
103
+ self._tom_server.Connect(connection_str)
104
+ # Service Principal Authentication for Power BI via token provider
105
+ else:
106
+ from sempy.fabric._client._utils import _build_adomd_connection_string
107
+ import Microsoft.AnalysisServices.Tabular as TOM
108
+ from Microsoft.AnalysisServices import AccessToken
109
+ from sempy.fabric._token_provider import (
110
+ create_on_access_token_expired_callback,
111
+ ConstantTokenProvider,
112
+ )
113
+ from System import Func
114
+
115
+ token = self._token_provider(audience="pbi")
116
+ self._tom_server = TOM.Server()
117
+ get_access_token = create_on_access_token_expired_callback(
118
+ ConstantTokenProvider(token)
119
+ )
120
+ self._tom_server.AccessToken = get_access_token(None)
121
+ self._tom_server.OnAccessTokenExpired = Func[AccessToken, AccessToken](
122
+ get_access_token
123
+ )
124
+ workspace_url = f"powerbi://api.powerbi.com/v1.0/myorg/{workspace}"
125
+ connection_str = _build_adomd_connection_string(
126
+ workspace_url, readonly=readonly
127
+ )
128
+ self._tom_server.Connect(connection_str)
129
+
130
+ if self._is_azure_as:
131
+ self.model = self._tom_server.Databases.GetByName(self._dataset_name).Model
132
+ else:
133
+ self.model = self._tom_server.Databases[dataset_id].Model
60
134
 
61
135
  self._table_map = {}
62
136
  self._column_map = {}
@@ -4404,8 +4478,6 @@ class TOMWrapper:
4404
4478
  if isinstance(measure_name, str):
4405
4479
  measure_name = [measure_name]
4406
4480
 
4407
- client = fabric.FabricRestClient()
4408
-
4409
4481
  if len(measure_name) > max_batch_size:
4410
4482
  measure_lists = [
4411
4483
  measure_name[i : i + max_batch_size]
@@ -4446,6 +4518,7 @@ class TOMWrapper:
4446
4518
  "modelItems"
4447
4519
  ].append(new_item)
4448
4520
 
4521
+ client = fabric.FabricRestClient()
4449
4522
  response = client.post("/explore/v202304/nl2nl/completions", json=payload)
4450
4523
  if response.status_code != 200:
4451
4524
  raise FabricHTTPException(response)
@@ -4667,11 +4740,15 @@ class TOMWrapper:
4667
4740
  @log
4668
4741
  @contextmanager
4669
4742
  def connect_semantic_model(
4670
- dataset: str | UUID, readonly: bool = True, workspace: Optional[str] = None
4743
+ dataset: str | UUID,
4744
+ readonly: bool = True,
4745
+ workspace: Optional[str | UUID] = None,
4671
4746
  ) -> Iterator[TOMWrapper]:
4672
4747
  """
4673
4748
  Connects to the Tabular Object Model (TOM) within a semantic model.
4674
4749
 
4750
+ Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
4751
+
4675
4752
  Parameters
4676
4753
  ----------
4677
4754
  dataset : str | uuid.UUID
@@ -4679,10 +4756,10 @@ def connect_semantic_model(
4679
4756
  readonly: bool, default=True
4680
4757
  Whether the connection is read-only or read/write. Setting this to False enables read/write which saves the changes made back to the server.
4681
4758
  workspace : str | uuid.UUID, default=None
4682
- The Fabric workspace name or ID.
4759
+ The Fabric workspace name or ID. Also supports Azure Analysis Services (Service Principal Authentication required).
4760
+ If connecting to Azure Analysis Services, enter the workspace parameter in the following format: 'asazure://<region>.asazure.windows.net/<server_name>'.
4683
4761
  Defaults to None which resolves to the workspace of the attached lakehouse
4684
4762
  or if no lakehouse attached, resolves to the workspace of the notebook.
4685
-
4686
4763
  Returns
4687
4764
  -------
4688
4765
  typing.Iterator[TOMWrapper]
@@ -4692,7 +4769,11 @@ def connect_semantic_model(
4692
4769
  # initialize .NET to make sure System and Microsoft.AnalysisServices.Tabular is defined
4693
4770
  sempy.fabric._client._utils._init_analysis_services()
4694
4771
 
4695
- tw = TOMWrapper(dataset=dataset, workspace=workspace, readonly=readonly)
4772
+ tw = TOMWrapper(
4773
+ dataset=dataset,
4774
+ workspace=workspace,
4775
+ readonly=readonly,
4776
+ )
4696
4777
  try:
4697
4778
  yield tw
4698
4779
  finally: