semantic-link-labs 0.9.4__py3-none-any.whl → 0.9.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (71)
  1. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/METADATA +19 -2
  2. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/RECORD +71 -64
  3. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +33 -4
  5. sempy_labs/_capacities.py +59 -128
  6. sempy_labs/_capacity_migration.py +19 -21
  7. sempy_labs/_connections.py +2 -4
  8. sempy_labs/_dashboards.py +60 -0
  9. sempy_labs/_data_pipelines.py +5 -31
  10. sempy_labs/_dataflows.py +2 -2
  11. sempy_labs/_dax_query_view.py +55 -0
  12. sempy_labs/_delta_analyzer.py +16 -14
  13. sempy_labs/_environments.py +28 -49
  14. sempy_labs/_eventhouses.py +27 -53
  15. sempy_labs/_eventstreams.py +16 -34
  16. sempy_labs/_external_data_shares.py +4 -10
  17. sempy_labs/_gateways.py +4 -4
  18. sempy_labs/_generate_semantic_model.py +2 -2
  19. sempy_labs/_git.py +90 -1
  20. sempy_labs/_graphQL.py +8 -21
  21. sempy_labs/_helper_functions.py +440 -91
  22. sempy_labs/_kql_databases.py +24 -35
  23. sempy_labs/_kql_querysets.py +15 -32
  24. sempy_labs/_list_functions.py +17 -192
  25. sempy_labs/_managed_private_endpoints.py +9 -2
  26. sempy_labs/_mirrored_databases.py +17 -49
  27. sempy_labs/_ml_experiments.py +6 -31
  28. sempy_labs/_ml_models.py +4 -28
  29. sempy_labs/_model_bpa.py +4 -11
  30. sempy_labs/_model_bpa_bulk.py +23 -27
  31. sempy_labs/_mounted_data_factories.py +119 -0
  32. sempy_labs/_notebooks.py +16 -26
  33. sempy_labs/_one_lake_integration.py +2 -1
  34. sempy_labs/_semantic_models.py +20 -0
  35. sempy_labs/_sql.py +13 -8
  36. sempy_labs/_sqldatabase.py +61 -100
  37. sempy_labs/_utils.py +42 -0
  38. sempy_labs/_vertipaq.py +25 -13
  39. sempy_labs/_warehouses.py +19 -20
  40. sempy_labs/_workloads.py +23 -9
  41. sempy_labs/_workspace_identity.py +6 -0
  42. sempy_labs/_workspaces.py +55 -7
  43. sempy_labs/admin/__init__.py +21 -1
  44. sempy_labs/admin/_apps.py +1 -1
  45. sempy_labs/admin/_artifacts.py +62 -0
  46. sempy_labs/admin/_basic_functions.py +3 -54
  47. sempy_labs/admin/_capacities.py +61 -0
  48. sempy_labs/admin/_reports.py +74 -0
  49. sempy_labs/admin/_scanner.py +2 -2
  50. sempy_labs/admin/_shared.py +4 -2
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_directlake_schema_compare.py +2 -1
  54. sempy_labs/directlake/_directlake_schema_sync.py +65 -19
  55. sempy_labs/directlake/_dl_helper.py +0 -6
  56. sempy_labs/directlake/_generate_shared_expression.py +10 -11
  57. sempy_labs/directlake/_guardrails.py +2 -1
  58. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
  59. sempy_labs/directlake/_update_directlake_partition_entity.py +11 -3
  60. sempy_labs/lakehouse/__init__.py +2 -0
  61. sempy_labs/lakehouse/_lakehouse.py +6 -7
  62. sempy_labs/lakehouse/_shortcuts.py +198 -57
  63. sempy_labs/migration/_migration_validation.py +0 -4
  64. sempy_labs/report/_download_report.py +4 -6
  65. sempy_labs/report/_generate_report.py +15 -23
  66. sempy_labs/report/_report_bpa.py +12 -19
  67. sempy_labs/report/_report_functions.py +2 -1
  68. sempy_labs/report/_report_rebind.py +8 -6
  69. sempy_labs/tom/_model.py +34 -16
  70. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/LICENSE +0 -0
  71. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/top_level.txt +0 -0
sempy_labs/_delta_analyzer.py CHANGED
@@ -52,6 +52,7 @@ def delta_analyzer(
     workspace: Optional[str | UUID] = None,
     column_stats: bool = True,
     skip_cardinality: bool = True,
+    schema: Optional[str] = None,
 ) -> Dict[str, pd.DataFrame]:
     """
     Analyzes a delta table and shows the results in dictionary containing a set of 5 dataframes. If 'export' is set to True, the results will be saved to delta tables in the lakehouse attached to the notebook.
@@ -85,6 +86,8 @@ def delta_analyzer(
         If True, collects data about column chunks and columns. If False, skips that step and only returns the other 3 dataframes.
     skip_cardinality : bool, default=True
         If True, skips the cardinality calculation for each column. If False, calculates the cardinality for each column.
+    schema : str, default=None
+        The name of the schema to which the table belongs (for schema-enabled lakehouses). If None, the default schema is used.
 
     Returns
     -------
@@ -96,25 +99,21 @@ def delta_analyzer(
     if not skip_cardinality:
         column_stats = True
 
-    # display_toggle = notebookutils.common.configs.pandas_display
-
-    # Turn off notebookutils display
-    # if display_toggle is True:
-    #     notebookutils.common.configs.pandas_display = False
-
     prefix = "SLL_DeltaAnalyzer_"
     now = datetime.now()
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace=workspace)
     (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
         lakehouse=lakehouse, workspace=workspace
     )
-    path = create_abfss_path(lakehouse_id, workspace_id, table_name)
-    local_path = _mount(lakehouse=lakehouse, workspace=workspace)
-    table_path = f"{local_path}/Tables/{table_name}"
-    delta_table_path = create_abfss_path(lakehouse_id, workspace_id, table_name)
 
-    # Set back to original value
-    # notebookutils.common.configs.pandas_display = display_toggle
+    delta_table_path = create_abfss_path(
+        lakehouse_id, workspace_id, table_name, schema=schema
+    )
+    local_path = _mount(lakehouse=lakehouse, workspace=workspace)
+    if schema is not None:
+        table_path = f"{local_path}/Tables/{schema}/{table_name}"
+    else:
+        table_path = f"{local_path}/Tables/{table_name}"
 
     parquet_file_df_columns = {
         # "Dataset": "string",
@@ -183,7 +182,7 @@ def delta_analyzer(
     # min_reader_version = table_details.get("minReaderVersion")
     # min_writer_version = table_details.get("minWriterVersion")
 
-    latest_files = _read_delta_table(path).inputFiles()
+    latest_files = _read_delta_table(delta_table_path).inputFiles()
     # file_paths = [f.split("/")[-1] for f in latest_files]
     all_parquet_files = get_parquet_file_infos(delta_table_path)
     common_file_paths = set(
@@ -430,6 +429,7 @@ def get_delta_table_history(
     table_name: str,
     lakehouse: Optional[str | UUID] = None,
     workspace: Optional[str | UUID] = None,
+    schema: Optional[str] = None,
 ) -> pd.DataFrame:
     """
     Returns the history of a delta table as a pandas dataframe.
@@ -445,6 +445,8 @@ def get_delta_table_history(
         The Fabric workspace name or ID used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    schema : str, default=None
+        The name of the schema to which the table belongs (for schema-enabled lakehouses). If None, the default schema is used.
 
     Returns
     -------
@@ -461,7 +463,7 @@ def get_delta_table_history(
     (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
         lakehouse=lakehouse, workspace=workspace
    )
-    path = create_abfss_path(lakehouse_id, workspace_id, table_name)
+    path = create_abfss_path(lakehouse_id, workspace_id, table_name, schema)
 
     from delta import DeltaTable
 
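Taken together, these hunks thread the new schema parameter through both path builders, so schema-enabled lakehouses are addressable end to end. A minimal usage sketch, assuming the two functions remain re-exported at the package top level and using placeholder workspace/lakehouse/schema/table names:

    import sempy_labs as labs

    # Analyze a table under a named schema; omitting 'schema' keeps the
    # pre-0.9.6 behavior (default schema).
    results = labs.delta_analyzer(
        table_name="sales",
        lakehouse="MyLakehouse",
        workspace="MyWorkspace",
        schema="dbo",
    )

    # get_delta_table_history gained the same parameter.
    history_df = labs.get_delta_table_history(
        table_name="sales",
        lakehouse="MyLakehouse",
        workspace="MyWorkspace",
        schema="dbo",
    )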
sempy_labs/_environments.py CHANGED
@@ -4,8 +4,10 @@ from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
-    _print_success,
     _create_dataframe,
+    resolve_item_id,
+    delete_item,
+    create_item,
 )
 from uuid import UUID
 
@@ -32,25 +34,11 @@ def create_environment(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": environment}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request="/v1/workspaces/{workspace_id}/environments",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=environment,
-        item_type="environment",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=environment,
+        description=description,
+        type="Environment",
+        workspace=workspace,
     )
 
 
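This is the recurring refactor of the release: hand-rolled payload/_base_api/_print_success sequences are replaced by a shared create_item helper (added to _helper_functions per the file list above). Caller-facing behavior should be unchanged; a hedged sketch with placeholder names:

    import sempy_labs as labs

    # Same public signature as before; the POST, LRO handling, and the
    # success message now live inside the shared create_item helper.
    labs.create_environment(
        environment="MyEnvironment",
        description="Team Spark environment",
        workspace="MyWorkspace",
    )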
@@ -60,6 +48,8 @@ def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Items - List Environments <https://learn.microsoft.com/rest/api/fabric/environment/items/list-environments>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -83,7 +73,9 @@ def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/environments", uses_pagination=True
+        request=f"/v1/workspaces/{workspace_id}/environments",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
@@ -98,7 +90,7 @@ def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     return df
 
 
-def delete_environment(environment: str, workspace: Optional[str | UUID] = None):
+def delete_environment(environment: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric environment.
 
@@ -106,61 +98,48 @@ def delete_environment(environment: str, workspace: Optional[str | UUID] = None)
 
     Parameters
     ----------
-    environment: str
-        Name of the environment.
+    environment: str | uuid.UUID
+        Name or ID of the environment.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    from sempy_labs._helper_functions import resolve_environment_id
-
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    environment_id = resolve_environment_id(
-        environment=environment, workspace=workspace_id
-    )
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/environments/{environment_id}",
-        method="delete",
-    )
-    _print_success(
-        item_name=environment,
-        item_type="environment",
-        workspace_name=workspace_name,
-        action="deleted",
-    )
+    delete_item(item=environment, type="Environment", workspace=workspace)
 
 
-def publish_environment(environment: str, workspace: Optional[str | UUID] = None):
+def publish_environment(
+    environment: str | UUID, workspace: Optional[str | UUID] = None
+):
     """
     Publishes a Fabric environment.
 
     This is a wrapper function for the following API: `Spark Libraries - Publish Environment <https://learn.microsoft.com/rest/api/fabric/environment/spark-libraries/publish-environment>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
-    environment: str
-        Name of the environment.
+    environment: str | uuid.UUID
+        Name or ID of the environment.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    from sempy_labs._helper_functions import resolve_environment_id
-
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    environment_id = resolve_environment_id(
-        environment=environment, workspace=workspace_id
+    item_id = resolve_item_id(
+        item=environment, type="Environment", workspace=workspace_id
     )
 
     _base_api(
-        request=f"/v1/workspaces/{workspace_id}/environments/{environment_id}/staging/publish",
+        request=f"/v1/workspaces/{workspace_id}/environments/{item_id}/staging/publish",
         method="post",
         lro_return_status_code=True,
         status_codes=None,
+        client="fabric_sp",
     )
 
     print(
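Since delete_environment and publish_environment now type their first parameter as str | UUID and resolve it via the generic helpers, an environment ID works as well as a name. Illustrative only, with a placeholder ID:

    from uuid import UUID
    import sempy_labs as labs

    env_id = UUID("00000000-0000-0000-0000-000000000000")  # placeholder

    labs.publish_environment(environment=env_id, workspace="MyWorkspace")
    labs.delete_environment(environment=env_id, workspace="MyWorkspace")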
sempy_labs/_eventhouses.py CHANGED
@@ -1,14 +1,15 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
-    _print_success,
     resolve_item_id,
     _create_dataframe,
     _conv_b64,
     _decode_b64,
+    delete_item,
+    create_item,
+    get_item_definition,
 )
 from uuid import UUID
 import sempy_labs._icons as icons
@@ -39,18 +40,11 @@ def create_eventhouse(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    if definition is not None:
-        if not isinstance(definition, dict):
-            raise ValueError(f"{icons.red_dot} The definition must be a dictionary.")
+    if definition is not None and not isinstance(definition, dict):
+        raise ValueError(f"{icons.red_dot} The definition must be a dictionary.")
 
-        payload["definition"] = {
+    definition_payload = (
+        {
             "parts": [
                 {
                     "path": "EventhouseProperties.json",
@@ -59,19 +53,16 @@ def create_eventhouse(
                 }
             ]
         }
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/eventhouses",
-        method="post",
-        status_codes=[201, 202],
-        payload=payload,
-        lro_return_status_code=True,
+        if definition is not None
+        else None
     )
-    _print_success(
-        item_name=name,
-        item_type="eventhouse",
-        workspace_name=workspace_name,
-        action="created",
+
+    create_item(
+        name=name,
+        type="Eventhouse",
+        workspace=workspace,
+        description=description,
+        definition=definition_payload,
     )
 
 
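The parenthesized conditional expression is easy to misread in diff form; it is equivalent to this plain if/else (the dict fields that fall between the two hunks are elided here exactly as they are above):

    if definition is not None:
        definition_payload = {
            "parts": [
                {
                    "path": "EventhouseProperties.json",
                    # ...payload fields not shown in the visible hunks...
                }
            ]
        }
    else:
        definition_payload = None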
@@ -81,6 +72,8 @@ def list_eventhouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Items - List Eventhouses <https://learn.microsoft.com/rest/api/fabric/environment/items/list-eventhouses>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -104,7 +97,9 @@ def list_eventhouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/eventhouses", uses_pagination=True
+        request=f"/v1/workspaces/{workspace_id}/eventhouses",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
@@ -135,16 +130,7 @@ def delete_eventhouse(name: str, workspace: Optional[str | UUID] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = resolve_item_id(item=name, type="Eventhouse", workspace=workspace)
-
-    fabric.delete_item(item_id=item_id, workspace=workspace)
-    _print_success(
-        item_name=name,
-        item_type="eventhouse",
-        workspace_name=workspace_name,
-        action="deleted",
-    )
+    delete_item(item=name, type="Eventhouse", workspace=workspace)
 
 
 def get_eventhouse_definition(
@@ -174,21 +160,9 @@ def get_eventhouse_definition(
         The eventhouse definition in .json format or as a pandas dataframe.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = resolve_item_id(item=eventhouse, type="Eventhouse", workspace=workspace)
-
-    result = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/eventhouses/{item_id}/getDefinition",
-        method="post",
-        status_codes=None,
-        lro_return_json=True,
+    return get_item_definition(
+        item=eventhouse,
+        type="Eventhouse",
+        workspace=workspace,
+        return_dataframe=return_dataframe,
     )
-
-    df = pd.json_normalize(result["definition"]["parts"])
-
-    if return_dataframe:
-        return df
-    else:
-        df_filt = df[df["path"] == "EventhouseProperties.json"]
-        payload = df_filt["payload"].iloc[0]
-        return _decode_b64(payload)
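get_eventhouse_definition is now a thin pass-through, so the return contract (decoded EventhouseProperties.json by default, or a dataframe of definition parts) is delegated to the shared get_item_definition helper. A hedged usage sketch with placeholder names:

    import sempy_labs as labs

    # Decoded EventhouseProperties.json payload by default...
    definition = labs.get_eventhouse_definition(
        eventhouse="MyEventhouse", workspace="MyWorkspace"
    )

    # ...or the definition parts as a dataframe.
    parts_df = labs.get_eventhouse_definition(
        eventhouse="MyEventhouse", workspace="MyWorkspace", return_dataframe=True
    )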
sempy_labs/_eventstreams.py CHANGED
@@ -1,14 +1,14 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
-    _print_success,
-    resolve_item_id,
+    delete_item,
     _create_dataframe,
+    create_item,
 )
 from uuid import UUID
+import sempy_labs._icons as icons
 
 
 def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
@@ -74,29 +74,14 @@ def create_eventstream(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/eventstreams",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=name,
-        item_type="eventstream",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=name, description=description, type="Eventstream", workspace=workspace
     )
 
 
-def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None):
+def delete_eventstream(
+    eventstream: str | UUID, workspace: Optional[str | UUID] = None, **kwargs
+):
     """
     Deletes a Fabric eventstream.
 
@@ -104,7 +89,7 @@ def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None)
 
     Parameters
     ----------
-    name: str | uuid.UUID
+    eventstream: str | uuid.UUID
         Name or ID of the eventstream.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
@@ -112,13 +97,10 @@ def delete_eventstream(name: str | UUID, workspace: Optional[str | UUID] = None)
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = resolve_item_id(item=name, type="Eventstream", workspace=workspace)
-
-    fabric.delete_item(item_id=item_id, workspace=workspace)
-    _print_success(
-        item_name=name,
-        item_type="eventstream",
-        workspace_name=workspace_name,
-        action="deleted",
-    )
+    if "name" in kwargs:
+        eventstream = kwargs["name"]
+        print(
+            f"{icons.warning} The 'name' parameter is deprecated. Please use 'eventstream' instead."
+        )
+
+    delete_item(item=eventstream, type="Eventstream", workspace=workspace)
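One caveat worth noting: the renamed first parameter has no default, so the kwargs shim only helps callers that also supply a positional value; a keyword-only delete_eventstream(name=...) call would raise a TypeError before the shim runs. New-style usage, with placeholder names:

    import sempy_labs as labs

    # 'eventstream' accepts a name or a UUID.
    labs.delete_eventstream("MyEventstream", workspace="MyWorkspace")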
sempy_labs/_external_data_shares.py CHANGED
@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 from uuid import UUID
 import pandas as pd
 from typing import Optional, List
@@ -7,6 +6,7 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
+    resolve_item_id,
 )
 
 
@@ -39,9 +39,7 @@ def create_external_data_share(
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = fabric.resolve_item_id(
-        item_name=item_name, type=item_type, workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
 
     if isinstance(paths, str):
         paths = [paths]
@@ -85,9 +83,7 @@ def revoke_external_data_share(
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = fabric.resolve_item_id(
-        item_name=item_name, type=item_type, workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
 
     _base_api(
         request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke",
@@ -124,9 +120,7 @@ def list_external_data_shares_in_item(
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = fabric.resolve_item_id(
-        item_name=item_name, type=item_type, workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
 
     columns = {
         "External Data Share Id": "string",
sempy_labs/_gateways.py CHANGED
@@ -314,7 +314,7 @@ def create_vnet_gateway(
         The name of the subnet.
     """
 
-    capacity_id = resolve_capacity_id(capacity)
+    capacity_id = resolve_capacity_id(capacity=capacity)
     payload = {
         "type": "VirtualNetwork",
         "displayName": name,
@@ -343,7 +343,7 @@
 
 
 def update_on_premises_gateway(
-    gateway: str,
+    gateway: str | UUID,
     allow_cloud_connection_refresh: Optional[bool] = None,
     allow_custom_connectors: Optional[bool] = None,
     load_balancing_setting: Optional[str] = None,
@@ -396,7 +396,7 @@
 
 
 def update_vnet_gateway(
-    gateway: str,
+    gateway: str | UUID,
     capacity: str | UUID,
     inactivity_minutes_before_sleep: Optional[int] = None,
     number_of_member_gateways: Optional[int] = None,
@@ -425,7 +425,7 @@
     payload = {}
 
     if capacity is not None:
-        capacity_id = resolve_capacity_id(capacity)
+        capacity_id = resolve_capacity_id(capacity=capacity)
         payload["capacityId"] = capacity_id
     if inactivity_minutes_before_sleep is not None:
         payload["inactivityMinutesBeforeSleep"] = inactivity_minutes_before_sleep
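The gateway changes are small but caller-visible: the gateway parameters now accept a UUID and resolve_capacity_id is invoked by keyword. Illustrative call, assuming top-level re-export and using a placeholder ID:

    from uuid import UUID
    import sempy_labs as labs

    labs.update_vnet_gateway(
        gateway=UUID("00000000-0000-0000-0000-000000000000"),  # ID now accepted
        capacity="MyCapacity",
        number_of_member_gateways=2,
    )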
sempy_labs/_generate_semantic_model.py CHANGED
@@ -5,13 +5,13 @@ import os
 from typing import Optional, List
 from sempy._utils._log import log
 from sempy_labs._helper_functions import (
-    resolve_lakehouse_name,
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     _conv_b64,
     _decode_b64,
     _base_api,
     _mount,
+    resolve_workspace_id,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
@@ -286,7 +286,7 @@ def deploy_semantic_model(
 
     if target_workspace is None:
         target_workspace_name = source_workspace_name
-        target_workspace_id = fabric.resolve_workspace_id(target_workspace_name)
+        target_workspace_id = resolve_workspace_id(workspace=target_workspace_name)
     else:
         (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
             target_workspace
sempy_labs/_git.py CHANGED
@@ -4,6 +4,7 @@ from typing import Optional, List
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
+    _create_dataframe,
 )
 from uuid import UUID
 
@@ -126,7 +127,7 @@
 
 def disconnect_workspace_from_git(workspace: Optional[str | UUID] = None):
     """
-    Disconnects a workpsace from a git repository.
+    Disconnects a workspace from a git repository.
 
     This is a wrapper function for the following API: `Git - Disconnect <https://learn.microsoft.com/rest/api/fabric/core/git/disconnect>`_.
 
@@ -432,3 +433,91 @@ def update_from_git(
     print(
         f"{icons.green_dot} The '{workspace_name}' workspace has been updated with commits pushed to the connected branch."
     )
+
+
+def get_my_git_credentials(
+    workspace: Optional[str | UUID] = None,
+) -> pd.DataFrame:
+    """
+    Returns the user's Git credentials configuration details.
+
+    This is a wrapper function for the following API: `Git - Get My Git Credentials <https://learn.microsoft.com/rest/api/fabric/core/git/get-my-git-credentials>`_.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the user's Git credentials configuration details.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    columns = {
+        "Source": "string",
+    }
+
+    df = _create_dataframe(columns)
+
+    response = _base_api(request=f"/v1/workspaces/{workspace_id}/git/myGitCredentials")
+
+    r = response.json()
+    new_data = {
+        "Source": r.get("source"),
+        "Connection Id": r.get("connectionId"),
+    }
+    df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def update_my_git_credentials(
+    source: str,
+    connection_id: Optional[UUID] = None,
+    workspace: Optional[str | UUID] = None,
+):
+    """
+    Updates the user's Git credentials configuration details.
+
+    This is a wrapper function for the following API: `Git - Update My Git Credentials <https://learn.microsoft.com/rest/api/fabric/core/git/update-my-git-credentials>`_.
+
+    Parameters
+    ----------
+    source : str
+        The Git credentials source. Valid options: 'Automatic', 'ConfiguredConnection', 'None'.
+    connection_id : UUID, default=None
+        The object ID of the connection. Valid only for the 'ConfiguredConnection' source.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    if source == "ConfiguredConnection" and connection_id is None:
+        raise ValueError(
+            f"{icons.red_dot} The 'ConfiguredConnection' source requires a connection_id."
+        )
+
+    payload = {
+        "source": source,
+    }
+
+    if connection_id is not None:
+        payload["connectionId"] = connection_id
+
+    _base_api(
+        request=f"/v1/workspaces/{workspace_id}/git/myGitCredentials",
+        method="patch",
+        payload=payload,
+    )
+
+    print(
+        f"{icons.green_dot} The user's Git credentials have been updated accordingly."
+    )
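Both additions follow the module's existing wrapper conventions (resolve the workspace, call _base_api, return a dataframe or print a status line). A hedged usage sketch with placeholder values, assuming the functions are surfaced like the other git helpers:

    from uuid import UUID
    import sempy_labs as labs

    # Inspect the current Git credentials configuration.
    creds_df = labs.get_my_git_credentials(workspace="MyWorkspace")

    # Point Git integration at a configured connection; omitting
    # connection_id for this source raises a ValueError, per the check above.
    labs.update_my_git_credentials(
        source="ConfiguredConnection",
        connection_id=UUID("00000000-0000-0000-0000-000000000000"),  # placeholder
        workspace="MyWorkspace",
    )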