semantic-link-labs 0.9.3__py3-none-any.whl → 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic; review the change details below before upgrading.

Files changed (68):
  1. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +25 -6
  2. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +68 -52
  3. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +45 -4
  5. sempy_labs/_capacities.py +22 -127
  6. sempy_labs/_capacity_migration.py +11 -9
  7. sempy_labs/_dashboards.py +60 -0
  8. sempy_labs/_data_pipelines.py +5 -31
  9. sempy_labs/_dax.py +17 -3
  10. sempy_labs/_delta_analyzer.py +279 -127
  11. sempy_labs/_environments.py +20 -48
  12. sempy_labs/_eventhouses.py +69 -30
  13. sempy_labs/_eventstreams.py +16 -34
  14. sempy_labs/_gateways.py +4 -4
  15. sempy_labs/_generate_semantic_model.py +30 -10
  16. sempy_labs/_git.py +90 -1
  17. sempy_labs/_graphQL.py +3 -20
  18. sempy_labs/_helper_functions.py +201 -44
  19. sempy_labs/_job_scheduler.py +226 -2
  20. sempy_labs/_kql_databases.py +19 -34
  21. sempy_labs/_kql_querysets.py +15 -32
  22. sempy_labs/_list_functions.py +14 -133
  23. sempy_labs/_mirrored_databases.py +14 -48
  24. sempy_labs/_ml_experiments.py +5 -30
  25. sempy_labs/_ml_models.py +4 -28
  26. sempy_labs/_model_bpa.py +17 -0
  27. sempy_labs/_model_bpa_rules.py +12 -2
  28. sempy_labs/_mounted_data_factories.py +119 -0
  29. sempy_labs/_notebooks.py +16 -26
  30. sempy_labs/_semantic_models.py +117 -0
  31. sempy_labs/_sql.py +78 -10
  32. sempy_labs/_sqldatabase.py +227 -0
  33. sempy_labs/_utils.py +42 -0
  34. sempy_labs/_vertipaq.py +17 -2
  35. sempy_labs/_warehouses.py +5 -17
  36. sempy_labs/_workloads.py +23 -9
  37. sempy_labs/_workspaces.py +13 -5
  38. sempy_labs/admin/__init__.py +70 -9
  39. sempy_labs/admin/_activities.py +166 -0
  40. sempy_labs/admin/_apps.py +143 -0
  41. sempy_labs/admin/_artifacts.py +62 -0
  42. sempy_labs/admin/_basic_functions.py +32 -704
  43. sempy_labs/admin/_capacities.py +311 -0
  44. sempy_labs/admin/_datasets.py +184 -0
  45. sempy_labs/admin/_domains.py +1 -1
  46. sempy_labs/admin/_items.py +3 -1
  47. sempy_labs/admin/_reports.py +239 -0
  48. sempy_labs/admin/_scanner.py +0 -1
  49. sempy_labs/admin/_shared.py +76 -0
  50. sempy_labs/admin/_tenant.py +489 -0
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_dl_helper.py +0 -1
  54. sempy_labs/directlake/_update_directlake_partition_entity.py +14 -0
  55. sempy_labs/graph/_teams.py +1 -1
  56. sempy_labs/graph/_users.py +9 -1
  57. sempy_labs/lakehouse/__init__.py +2 -0
  58. sempy_labs/lakehouse/_lakehouse.py +6 -7
  59. sempy_labs/lakehouse/_shortcuts.py +216 -64
  60. sempy_labs/report/__init__.py +3 -1
  61. sempy_labs/report/_download_report.py +4 -1
  62. sempy_labs/report/_export_report.py +272 -0
  63. sempy_labs/report/_generate_report.py +9 -17
  64. sempy_labs/report/_report_bpa.py +12 -19
  65. sempy_labs/report/_report_functions.py +9 -261
  66. sempy_labs/tom/_model.py +307 -40
  67. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
  68. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
@@ -1,18 +1,25 @@
1
- import sempy.fabric as fabric
2
1
  import pandas as pd
3
2
  from typing import Optional
4
3
  from sempy_labs._helper_functions import (
5
4
  resolve_workspace_name_and_id,
6
5
  _base_api,
7
- _print_success,
8
6
  resolve_item_id,
9
7
  _create_dataframe,
8
+ _conv_b64,
9
+ _decode_b64,
10
+ delete_item,
11
+ create_item,
12
+ get_item_definition,
10
13
  )
11
14
  from uuid import UUID
15
+ import sempy_labs._icons as icons
12
16
 
13
17
 
14
18
  def create_eventhouse(
15
- name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
19
+ name: str,
20
+ definition: Optional[dict],
21
+ description: Optional[str] = None,
22
+ workspace: Optional[str | UUID] = None,
16
23
  ):
17
24
  """
18
25
  Creates a Fabric eventhouse.
@@ -23,6 +30,8 @@ def create_eventhouse(
23
30
  ----------
24
31
  name: str
25
32
  Name of the eventhouse.
33
+ definition : dict
34
+ The definition (EventhouseProperties.json) of the eventhouse.
26
35
  description : str, default=None
27
36
  A description of the environment.
28
37
  workspace : str | uuid.UUID, default=None
@@ -31,25 +40,29 @@ def create_eventhouse(
31
40
  or if no lakehouse attached, resolves to the workspace of the notebook.
32
41
  """
33
42
 
34
- (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
35
-
36
- payload = {"displayName": name}
37
-
38
- if description:
39
- payload["description"] = description
40
-
41
- _base_api(
42
- request=f"/v1/workspaces/{workspace_id}/eventhouses",
43
- method="post",
44
- status_codes=[201, 202],
45
- payload=payload,
46
- lro_return_status_code=True,
43
+ if definition is not None and not isinstance(definition, dict):
44
+ raise ValueError(f"{icons.red_dot} The definition must be a dictionary.")
45
+
46
+ definition_payload = (
47
+ {
48
+ "parts": [
49
+ {
50
+ "path": "EventhouseProperties.json",
51
+ "payload": _conv_b64(definition),
52
+ "payloadType": "InlineBase64",
53
+ }
54
+ ]
55
+ }
56
+ if definition is not None
57
+ else None
47
58
  )
48
- _print_success(
49
- item_name=name,
50
- item_type="eventhouse",
51
- workspace_name=workspace_name,
52
- action="created",
59
+
60
+ create_item(
61
+ name=name,
62
+ type="Eventhouse",
63
+ workspace=workspace,
64
+ description=description,
65
+ definition=definition_payload,
53
66
  )
54
67
 
55
68
 
@@ -113,13 +126,39 @@ def delete_eventhouse(name: str, workspace: Optional[str | UUID] = None):
113
126
  or if no lakehouse attached, resolves to the workspace of the notebook.
114
127
  """
115
128
 
116
- (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
117
- item_id = resolve_item_id(item=name, type="Eventhouse", workspace=workspace)
118
-
119
- fabric.delete_item(item_id=item_id, workspace=workspace)
120
- _print_success(
121
- item_name=name,
122
- item_type="eventhouse",
123
- workspace_name=workspace_name,
124
- action="deleted",
129
+ delete_item(item=name, type="Eventhouse", workspace=workspace)
130
+
131
+
132
+ def get_eventhouse_definition(
133
+ eventhouse: str | UUID,
134
+ workspace: Optional[str | UUID] = None,
135
+ return_dataframe: bool = False,
136
+ ) -> dict | pd.DataFrame:
137
+ """
138
+ Gets the eventhouse definition.
139
+
140
+ This is a wrapper function for the following API: `Items - Get Eventhouse Definition <https://learn.microsoft.com/rest/api/fabric/eventhouse/items/get-eventhouse-definition>`_.
141
+
142
+ Parameters
143
+ ----------
144
+ eventhouse : str
145
+ Name of the eventhouse.
146
+ workspace : str | uuid.UUID, default=None
147
+ The Fabric workspace name or ID in which the eventhouse resides.
148
+ Defaults to None which resolves to the workspace of the attached lakehouse
149
+ or if no lakehouse attached, resolves to the workspace of the notebook.
150
+ return_dataframe : bool, default=False
151
+ If True, returns a dataframe. If False, returns a json dictionary.
152
+
153
+ Returns
154
+ -------
155
+ dict | pandas.DataFrame
156
+ The eventhouse definition in .json format or as a pandas dataframe.
157
+ """
158
+
159
+ return get_item_definition(
160
+ item=eventhouse,
161
+ type="Eventhouse",
162
+ workspace=workspace,
163
+ return_dataframe=return_dataframe,
125
164
  )
@@ -1,14 +1,14 @@
1
- import sempy.fabric as fabric
2
1
  import pandas as pd
3
2
  from typing import Optional
4
3
  from sempy_labs._helper_functions import (
5
4
  resolve_workspace_name_and_id,
6
5
  _base_api,
7
- _print_success,
8
- resolve_item_id,
6
+ delete_item,
9
7
  _create_dataframe,
8
+ create_item,
10
9
  )
11
10
  from uuid import UUID
11
+ import sempy_labs._icons as icons
12
12
 
13
13
 
14
14
  def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
@@ -74,29 +74,14 @@ def create_eventstream(
74
74
  or if no lakehouse attached, resolves to the workspace of the notebook.
75
75
  """
76
76
 
77
- (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
78
-
79
- payload = {"displayName": name}
80
-
81
- if description:
82
- payload["description"] = description
83
-
84
- _base_api(
85
- request=f"/v1/workspaces/{workspace_id}/eventstreams",
86
- method="post",
87
- payload=payload,
88
- status_codes=[201, 202],
89
- lro_return_status_code=True,
90
- )
91
- _print_success(
92
- item_name=name,
93
- item_type="eventstream",
94
- workspace_name=workspace_name,
95
- action="created",
77
+ create_item(
78
+ name=name, description=description, type="Eventstream", workspace=workspace
96
79
  )
97
80
 
98
81
 
99
- def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None):
82
+ def delete_eventstream(
83
+ eventstream: str | UUID, workspace: Optional[str | UUID] = None, **kwargs
84
+ ):
100
85
  """
101
86
  Deletes a Fabric eventstream.
102
87
 
@@ -104,7 +89,7 @@ def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None)
104
89
 
105
90
  Parameters
106
91
  ----------
107
- name: str | uuid.UUID
92
+ eventstream: str | uuid.UUID
108
93
  Name or ID of the eventstream.
109
94
  workspace : str | uuid.UUID, default=None
110
95
  The Fabric workspace name or ID.
@@ -112,13 +97,10 @@ def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None)
112
97
  or if no lakehouse attached, resolves to the workspace of the notebook.
113
98
  """
114
99
 
115
- (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
116
- item_id = resolve_item_id(item=name, type="Eventstream", workspace=workspace)
117
-
118
- fabric.delete_item(item_id=item_id, workspace=workspace)
119
- _print_success(
120
- item_name=name,
121
- item_type="eventstream",
122
- workspace_name=workspace_name,
123
- action="deleted",
124
- )
100
+ if "name" in kwargs:
101
+ eventstream = kwargs["name"]
102
+ print(
103
+ f"{icons.warning} The 'name' parameter is deprecated. Please use 'eventstream' instead."
104
+ )
105
+
106
+ delete_item(item=eventstream, type="Eventstream", workspace=workspace)
sempy_labs/_gateways.py CHANGED
@@ -314,7 +314,7 @@ def create_vnet_gateway(
314
314
  The name of the subnet.
315
315
  """
316
316
 
317
- capacity_id = resolve_capacity_id(capacity)
317
+ capacity_id = resolve_capacity_id(capacity=capacity)
318
318
  payload = {
319
319
  "type": "VirtualNetwork",
320
320
  "displayName": name,
@@ -343,7 +343,7 @@ def create_vnet_gateway(
343
343
 
344
344
 
345
345
  def update_on_premises_gateway(
346
- gateway: str,
346
+ gateway: str | UUID,
347
347
  allow_cloud_connection_refresh: Optional[bool] = None,
348
348
  allow_custom_connectors: Optional[bool] = None,
349
349
  load_balancing_setting: Optional[str] = None,
@@ -396,7 +396,7 @@ def update_on_premises_gateway(
396
396
 
397
397
 
398
398
  def update_vnet_gateway(
399
- gateway: str,
399
+ gateway: str | UUID,
400
400
  capacity: str | UUID,
401
401
  inactivity_minutes_before_sleep: Optional[int] = None,
402
402
  number_of_member_gateways: Optional[int] = None,
@@ -425,7 +425,7 @@ def update_vnet_gateway(
425
425
  payload = {}
426
426
 
427
427
  if capacity is not None:
428
- capacity_id = resolve_capacity_id(capacity)
428
+ capacity_id = resolve_capacity_id(capacity=capacity)
429
429
  payload["capacityId"] = capacity_id
430
430
  if inactivity_minutes_before_sleep is not None:
431
431
  payload["inactivityMinutesBeforeSleep"] = inactivity_minutes_before_sleep
@@ -5,12 +5,12 @@ import os
5
5
  from typing import Optional, List
6
6
  from sempy._utils._log import log
7
7
  from sempy_labs._helper_functions import (
8
- resolve_lakehouse_name,
9
8
  resolve_workspace_name_and_id,
10
9
  resolve_dataset_name_and_id,
11
10
  _conv_b64,
12
11
  _decode_b64,
13
12
  _base_api,
13
+ _mount,
14
14
  )
15
15
  from sempy_labs.lakehouse._lakehouse import lakehouse_attached
16
16
  import sempy_labs._icons as icons
@@ -252,6 +252,7 @@ def deploy_semantic_model(
252
252
  target_workspace: Optional[str | UUID] = None,
253
253
  refresh_target_dataset: bool = True,
254
254
  overwrite: bool = False,
255
+ perspective: Optional[str] = None,
255
256
  ):
256
257
  """
257
258
  Deploys a semantic model based on an existing semantic model.
@@ -274,6 +275,8 @@ def deploy_semantic_model(
274
275
  If set to True, this will initiate a full refresh of the target semantic model in the target workspace.
275
276
  overwrite : bool, default=False
276
277
  If set to True, overwrites the existing semantic model in the workspace if it exists.
278
+ perspective : str, default=None
279
+ Set this to the name of a perspective in the model and it will reduce the deployed model down to the tables/columns/measures/hierarchies within that perspective.
277
280
  """
278
281
 
279
282
  (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
@@ -307,7 +310,21 @@ def deploy_semantic_model(
307
310
  f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace_name}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
308
311
  )
309
312
 
310
- bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace_id)
313
+ if perspective is not None:
314
+
315
+ from sempy_labs.tom import connect_semantic_model
316
+
317
+ with connect_semantic_model(
318
+ dataset=source_dataset, workspace=source_workspace, readonly=True
319
+ ) as tom:
320
+
321
+ df_added = tom._reduce_model(perspective_name=perspective)
322
+ bim = tom.get_bim()
323
+
324
+ else:
325
+ bim = get_semantic_model_bim(
326
+ dataset=source_dataset, workspace=source_workspace_id
327
+ )
311
328
 
312
329
  # Create the semantic model if the model does not exist
313
330
  if dfD_filt.empty:
@@ -325,6 +342,9 @@ def deploy_semantic_model(
325
342
  if refresh_target_dataset:
326
343
  refresh_semantic_model(dataset=target_dataset, workspace=target_workspace_id)
327
344
 
345
+ if perspective is not None:
346
+ return df_added
347
+
328
348
 
329
349
  @log
330
350
  def get_semantic_model_bim(
@@ -368,16 +388,16 @@ def get_semantic_model_bim(
368
388
  f"{icons.red_dot} In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
369
389
  )
370
390
 
371
- lakehouse = resolve_lakehouse_name()
372
- folderPath = "/lakehouse/default/Files"
373
- fileExt = ".bim"
374
- if not save_to_file_name.endswith(fileExt):
375
- save_to_file_name = f"{save_to_file_name}{fileExt}"
376
- filePath = os.path.join(folderPath, save_to_file_name)
377
- with open(filePath, "w") as json_file:
391
+ local_path = _mount()
392
+ save_folder = f"{local_path}/Files"
393
+ file_ext = ".bim"
394
+ if not save_to_file_name.endswith(file_ext):
395
+ save_to_file_name = f"{save_to_file_name}{file_ext}"
396
+ file_path = os.path.join(save_folder, save_to_file_name)
397
+ with open(file_path, "w") as json_file:
378
398
  json.dump(bimJson, json_file, indent=4)
379
399
  print(
380
- f"{icons.green_dot} The {fileExt} file for the '{dataset_name}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
400
+ f"{icons.green_dot} The {file_ext} file for the '{dataset_name}' semantic model has been saved to the lakehouse attached to the notebook within: 'Files/{save_to_file_name}'.\n\n"
381
401
  )
382
402
 
383
403
  return bimJson
sempy_labs/_git.py CHANGED
@@ -4,6 +4,7 @@ from typing import Optional, List
4
4
  from sempy_labs._helper_functions import (
5
5
  resolve_workspace_name_and_id,
6
6
  _base_api,
7
+ _create_dataframe,
7
8
  )
8
9
  from uuid import UUID
9
10
 
@@ -126,7 +127,7 @@ def connect_workspace_to_github(
126
127
 
127
128
  def disconnect_workspace_from_git(workspace: Optional[str | UUID] = None):
128
129
  """
129
- Disconnects a workpsace from a git repository.
130
+ Disconnects a workspace from a git repository.
130
131
 
131
132
  This is a wrapper function for the following API: `Git - Disconnect <https://learn.microsoft.com/rest/api/fabric/core/git/disconnect>`_.
132
133
 
@@ -432,3 +433,91 @@ def update_from_git(
432
433
  print(
433
434
  f"{icons.green_dot} The '{workspace_name}' workspace has been updated with commits pushed to the connected branch."
434
435
  )
436
+
437
+
438
+ def get_my_git_credentials(
439
+ workspace: Optional[str | UUID] = None,
440
+ ) -> pd.DataFrame:
441
+ """
442
+ Returns the user's Git credentials configuration details.
443
+
444
+ This is a wrapper function for the following API: `Git - Get My Git Credentials <https://learn.microsoft.com/rest/api/fabric/core/git/get-my-git-credentials>`_.
445
+
446
+ Parameters
447
+ ----------
448
+ workspace : str | uuid.UUID, default=None
449
+ The workspace name or ID.
450
+ Defaults to None which resolves to the workspace of the attached lakehouse
451
+ or if no lakehouse attached, resolves to the workspace of the notebook.
452
+
453
+ Returns
454
+ -------
455
+ pandas.DataFrame
456
+ A pandas dataframe showing the user's Git credentials configuration details.
457
+ """
458
+
459
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
460
+
461
+ columns = {
462
+ "Source": "string",
463
+ }
464
+
465
+ df = _create_dataframe(columns)
466
+
467
+ response = _base_api(request=f"/v1/workspaces/{workspace_id}/git/myGitCredentials")
468
+
469
+ r = response.json()
470
+ new_data = {
471
+ "Source": r.get("source"),
472
+ "Connection Id": r.get("connectionId"),
473
+ }
474
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
475
+
476
+ return df
477
+
478
+
479
+ def update_my_git_credentials(
480
+ source: str,
481
+ connection_id: Optional[UUID] = None,
482
+ workspace: Optional[str | UUID] = None,
483
+ ):
484
+ """
485
+ Updates the user's Git credentials configuration details.
486
+
487
+ This is a wrapper function for the following API: `Git - Update My Git Credentials <https://learn.microsoft.com/rest/api/fabric/core/git/update-my-git-credentials>`_.
488
+
489
+ Parameters
490
+ ----------
491
+ source : str
492
+ The Git credentials source. Valid options: 'Automatic', 'ConfiguredConnection', 'None'.
493
+ connection_id : UUID, default=None
494
+ The object ID of the connection. Valid only for the 'ConfiguredConnection' source.
495
+ workspace : str | uuid.UUID, default=None
496
+ The workspace name or ID.
497
+ Defaults to None which resolves to the workspace of the attached lakehouse
498
+ or if no lakehouse attached, resolves to the workspace of the notebook.
499
+ """
500
+
501
+ (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
502
+
503
+ if source == "ConfiguredConnection" and connection_id is None:
504
+ raise ValueError(
505
+ f"{icons.red_dot} The 'ConfiguredConnection' source requires a connection_id."
506
+ )
507
+
508
+ payload = {
509
+ "source": source,
510
+ }
511
+
512
+ if connection_id is not None:
513
+ payload["connectionId"] = connection_id
514
+
515
+ _base_api(
516
+ request=f"/v1/workspaces/{workspace_id}/git/myGitCredentials",
517
+ method="patch",
518
+ payload=payload,
519
+ )
520
+
521
+ print(
522
+ f"{icons.green_dot} The user's Git credentials have been updated accordingly."
523
+ )
sempy_labs/_graphQL.py CHANGED
@@ -5,7 +5,7 @@ from sempy_labs._helper_functions import (
5
5
  _base_api,
6
6
  _create_dataframe,
7
7
  resolve_workspace_name_and_id,
8
- _print_success,
8
+ create_item,
9
9
  )
10
10
 
11
11
 
@@ -73,23 +73,6 @@ def create_graphql_api(
73
73
  or if no lakehouse attached, resolves to the workspace of the notebook.
74
74
  """
75
75
 
76
- (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
77
-
78
- payload = {"displayName": name}
79
-
80
- if description:
81
- payload["description"] = description
82
-
83
- _base_api(
84
- request=f"/v1/workspaces/{workspace_id}/GraphQLApis",
85
- method="post",
86
- status_codes=[201, 202],
87
- payload=payload,
88
- lro_return_status_code=True,
89
- )
90
- _print_success(
91
- item_name=name,
92
- item_type="GraphQL API",
93
- workspace_name=workspace_name,
94
- action="created",
76
+ create_item(
77
+ name=name, description=description, type="GraphQLApi", workspace=workspace
95
78
  )