semantic-link-labs 0.8.2__py3-none-any.whl → 0.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (108) hide show
  1. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/METADATA +37 -8
  2. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/RECORD +108 -104
  3. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +38 -0
  5. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
  6. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
  9. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
  10. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
  11. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
  12. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
  13. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
  14. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
  16. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
  17. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
  18. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
  19. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
  20. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
  22. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
  23. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
  24. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
  27. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
  28. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
  29. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
  30. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
  32. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
  36. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
  37. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
  38. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
  41. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
  42. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
  43. sempy_labs/_capacities.py +138 -25
  44. sempy_labs/_capacity_migration.py +161 -60
  45. sempy_labs/_clear_cache.py +3 -3
  46. sempy_labs/_data_pipelines.py +54 -0
  47. sempy_labs/_dataflows.py +4 -0
  48. sempy_labs/_deployment_pipelines.py +13 -7
  49. sempy_labs/_environments.py +6 -0
  50. sempy_labs/_eventhouses.py +6 -0
  51. sempy_labs/_eventstreams.py +6 -0
  52. sempy_labs/_external_data_shares.py +190 -0
  53. sempy_labs/_generate_semantic_model.py +26 -4
  54. sempy_labs/_git.py +15 -15
  55. sempy_labs/_helper_functions.py +186 -11
  56. sempy_labs/_icons.py +55 -22
  57. sempy_labs/_kql_databases.py +6 -0
  58. sempy_labs/_kql_querysets.py +6 -0
  59. sempy_labs/_list_functions.py +6 -3
  60. sempy_labs/_managed_private_endpoints.py +166 -0
  61. sempy_labs/_mirrored_warehouses.py +2 -0
  62. sempy_labs/_ml_experiments.py +6 -0
  63. sempy_labs/_ml_models.py +6 -0
  64. sempy_labs/_model_bpa.py +11 -6
  65. sempy_labs/_model_bpa_bulk.py +14 -30
  66. sempy_labs/_model_bpa_rules.py +8 -3
  67. sempy_labs/_notebooks.py +111 -15
  68. sempy_labs/_query_scale_out.py +8 -6
  69. sempy_labs/_refresh_semantic_model.py +299 -49
  70. sempy_labs/_spark.py +12 -5
  71. sempy_labs/_sql.py +2 -2
  72. sempy_labs/_translations.py +16 -14
  73. sempy_labs/_vertipaq.py +127 -116
  74. sempy_labs/_warehouses.py +90 -1
  75. sempy_labs/_workloads.py +128 -0
  76. sempy_labs/_workspace_identity.py +4 -4
  77. sempy_labs/_workspaces.py +14 -1
  78. sempy_labs/admin/__init__.py +2 -0
  79. sempy_labs/admin/_basic_functions.py +203 -58
  80. sempy_labs/admin/_domains.py +18 -18
  81. sempy_labs/directlake/__init__.py +2 -0
  82. sempy_labs/directlake/_directlake_schema_sync.py +2 -6
  83. sempy_labs/directlake/_dl_helper.py +4 -1
  84. sempy_labs/directlake/_generate_shared_expression.py +1 -1
  85. sempy_labs/directlake/_get_shared_expression.py +7 -1
  86. sempy_labs/directlake/_guardrails.py +3 -2
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -8
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
  90. sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
  91. sempy_labs/lakehouse/_shortcuts.py +4 -0
  92. sempy_labs/migration/_create_pqt_file.py +2 -2
  93. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  94. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
  95. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
  96. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
  97. sempy_labs/migration/_migration_validation.py +2 -0
  98. sempy_labs/migration/_refresh_calc_tables.py +1 -0
  99. sempy_labs/report/__init__.py +4 -1
  100. sempy_labs/report/_generate_report.py +16 -14
  101. sempy_labs/report/_paginated.py +74 -0
  102. sempy_labs/report/_report_bpa.py +8 -10
  103. sempy_labs/report/_report_functions.py +19 -19
  104. sempy_labs/report/_report_rebind.py +6 -1
  105. sempy_labs/report/_reportwrapper.py +3 -3
  106. sempy_labs/tom/_model.py +173 -67
  107. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/LICENSE +0 -0
  108. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.4.dist-info}/top_level.txt +0 -0
@@ -2,7 +2,7 @@ import sempy.fabric as fabric
2
2
  from sempy_labs._helper_functions import (
3
3
  resolve_dataset_id,
4
4
  is_default_semantic_model,
5
- get_adls_client,
5
+ _get_adls_client,
6
6
  )
7
7
  from typing import Optional
8
8
  import sempy_labs._icons as icons
@@ -213,7 +213,7 @@ def copy_semantic_model_backup_file(
213
213
  source_path = f"/{source_workspace}/{source_file_name}"
214
214
  target_path = f"/{target_workspace}/{target_file_name}"
215
215
 
216
- client = get_adls_client(account_name=storage_account)
216
+ client = _get_adls_client(account_name=storage_account)
217
217
 
218
218
  source_file_system_client = client.get_file_system_client(
219
219
  file_system=source_file_system
@@ -316,7 +316,7 @@ def list_storage_account_files(
316
316
  ]
317
317
  )
318
318
 
319
- onelake = get_adls_client(storage_account)
319
+ onelake = _get_adls_client(storage_account)
320
320
  fs = onelake.get_file_system_client(container)
321
321
 
322
322
  for x in list(fs.get_paths()):
@@ -6,6 +6,7 @@ from sempy_labs._helper_functions import (
6
6
  resolve_workspace_name_and_id,
7
7
  lro,
8
8
  pagination,
9
+ _decode_b64,
9
10
  )
10
11
  from sempy.fabric.exceptions import FabricHTTPException
11
12
 
@@ -14,6 +15,8 @@ def list_data_pipelines(workspace: Optional[str] = None) -> pd.DataFrame:
14
15
  """
15
16
  Shows the data pipelines within a workspace.
16
17
 
18
+ This is a wrapper function for the following API: `Items - List Data Pipelines <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/list-data-pipelines>`_.
19
+
17
20
  Parameters
18
21
  ----------
19
22
  workspace : str, default=None
@@ -56,6 +59,8 @@ def create_data_pipeline(
56
59
  """
57
60
  Creates a Fabric data pipeline.
58
61
 
62
+ This is a wrapper function for the following API: `Items - Create Data Pipeline <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/create-data-pipeline>`_.
63
+
59
64
  Parameters
60
65
  ----------
61
66
  name: str
@@ -91,6 +96,8 @@ def delete_data_pipeline(name: str, workspace: Optional[str] = None):
91
96
  """
92
97
  Deletes a Fabric data pipeline.
93
98
 
99
+ This is a wrapper function for the following API: `Items - Delete Data Pipeline <https://learn.microsoft.com/rest/api/fabric/datapipeline/items/delete-data-pipeline>`_.
100
+
94
101
  Parameters
95
102
  ----------
96
103
  name: str
@@ -116,3 +123,50 @@ def delete_data_pipeline(name: str, workspace: Optional[str] = None):
116
123
  print(
117
124
  f"{icons.green_dot} The '{name}' data pipeline within the '{workspace}' workspace has been deleted."
118
125
  )
126
+
127
+
128
+ def get_data_pipeline_definition(
129
+ name: str, workspace: Optional[str] = None, decode: bool = True
130
+ ) -> dict | pd.DataFrame:
131
+ """
132
+ Obtains the definition of a data pipeline.
133
+
134
+ Parameters
135
+ ----------
136
+ name : str
137
+ The name of the data pipeline.
138
+ workspace : str, default=None
139
+ The Fabric workspace name.
140
+ Defaults to None which resolves to the workspace of the attached lakehouse
141
+ or if no lakehouse attached, resolves to the workspace of the notebook.
142
+ decode : bool, default=True
143
+ decode : bool, default=True
144
+ If True, decodes the data pipeline definition file into .json format.
145
+ If False, obtains the data pipeline definition file in pandas DataFrame format.
146
+
147
+ Returns
148
+ -------
149
+ dict | pandas.DataFrame
150
+ The data pipeline definition as a decoded dictionary (decode=True) or as a pandas dataframe of its definition parts (decode=False).
151
+ """
152
+
153
+ workspace = fabric.resolve_workspace_name(workspace)
154
+ workspace_id = fabric.resolve_workspace_id(workspace)
155
+ item_id = fabric.resolve_item_id(
156
+ item_name=name, type="DataPipeline", workspace=workspace
157
+ )
158
+
159
+ client = fabric.FabricRestClient()
160
+ response = client.post(
161
+ f"/v1/workspaces/{workspace_id}/dataPipelines/{item_id}/getDefinition"
162
+ )
163
+ result = lro(client, response).json()
164
+
165
+ df = pd.json_normalize(result["definition"]["parts"])
166
+
167
+ if not decode:
168
+ return df
169
+ content = df[df["path"] == "pipeline-content.json"]
170
+ payload = content["payload"].iloc[0]
171
+
172
+ return _decode_b64(payload)
sempy_labs/_dataflows.py CHANGED
@@ -59,6 +59,8 @@ def assign_workspace_to_dataflow_storage(
59
59
  """
60
60
  Assigns a dataflow storage account to a workspace.
61
61
 
62
+ This is a wrapper function for the following API: `Dataflow Storage Accounts - Groups AssignToDataflowStorage <https://learn.microsoft.com/rest/api/power-bi/dataflow-storage-accounts/groups-assign-to-dataflow-storage>`_.
63
+
62
64
  Parameters
63
65
  ----------
64
66
  dataflow_storage_account : str
@@ -99,6 +101,8 @@ def list_dataflow_storage_accounts() -> pd.DataFrame:
99
101
  """
100
102
  Shows the accessible dataflow storage accounts.
101
103
 
104
+ This is a wrapper function for the following API: `Dataflow Storage Accounts - Get Dataflow Storage Accounts <https://learn.microsoft.com/rest/api/power-bi/dataflow-storage-accounts/get-dataflow-storage-accounts>`_.
105
+
102
106
  Returns
103
107
  -------
104
108
  pandas.DataFrame
@@ -11,6 +11,8 @@ def list_deployment_pipelines() -> pd.DataFrame:
11
11
  """
12
12
  Shows a list of deployment pipelines the user can access.
13
13
 
14
+ This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipelines <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipelines>`_.
15
+
14
16
  Returns
15
17
  -------
16
18
  pandas.DataFrame
@@ -45,6 +47,8 @@ def list_deployment_pipeline_stages(deployment_pipeline: str) -> pd.DataFrame:
45
47
  """
46
48
  Shows the specified deployment pipeline stages.
47
49
 
50
+ This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipeline Stages <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipeline-stages>`_.
51
+
48
52
  Parameters
49
53
  ----------
50
54
  deployment_pipeline : str
@@ -84,13 +88,13 @@ def list_deployment_pipeline_stages(deployment_pipeline: str) -> pd.DataFrame:
84
88
  for r in responses:
85
89
  for v in r.get("value", []):
86
90
  new_data = {
87
- "Deployment Pipeline Stage Id": v["id"],
88
- "Deployment Pipeline Stage Name": v["displayName"],
89
- "Description": v["description"],
90
- "Order": v["order"],
91
- "Workspace Id": v["workspaceId"],
92
- "Workspace Name": v["workspaceName"],
93
- "Public": v["isPublic"],
91
+ "Deployment Pipeline Stage Id": v.get("id"),
92
+ "Deployment Pipeline Stage Name": v.get("displayName"),
93
+ "Description": v.get("description"),
94
+ "Order": v.get("order"),
95
+ "Workspace Id": v.get("workspaceId"),
96
+ "Workspace Name": v.get("workspaceName"),
97
+ "Public": v.get("isPublic"),
94
98
  }
95
99
  df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
96
100
 
@@ -106,6 +110,8 @@ def list_deployment_pipeline_stage_items(
106
110
  """
107
111
  Shows the supported items from the workspace assigned to the specified stage of the specified deployment pipeline.
108
112
 
113
+ This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipeline Stage Items <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipeline-stage-items>`_.
114
+
109
115
  Parameters
110
116
  ----------
111
117
  deployment_pipeline : str
@@ -16,6 +16,8 @@ def create_environment(
16
16
  """
17
17
  Creates a Fabric environment.
18
18
 
19
+ This is a wrapper function for the following API: `Items - Create Environment <https://learn.microsoft.com/rest/api/fabric/environment/items/create-environment>`_.
20
+
19
21
  Parameters
20
22
  ----------
21
23
  environment: str
@@ -51,6 +53,8 @@ def list_environments(workspace: Optional[str] = None) -> pd.DataFrame:
51
53
  """
52
54
  Shows the environments within a workspace.
53
55
 
56
+ This is a wrapper function for the following API: `Items - List Environments <https://learn.microsoft.com/rest/api/fabric/environment/items/list-environments>`_.
57
+
54
58
  Parameters
55
59
  ----------
56
60
  workspace : str, default=None
@@ -91,6 +95,8 @@ def delete_environment(environment: str, workspace: Optional[str] = None):
91
95
  """
92
96
  Deletes a Fabric environment.
93
97
 
98
+ This is a wrapper function for the following API: `Items - Delete Environment <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-environment>`_.
99
+
94
100
  Parameters
95
101
  ----------
96
102
  environment: str
@@ -16,6 +16,8 @@ def create_eventhouse(
16
16
  """
17
17
  Creates a Fabric eventhouse.
18
18
 
19
+ This is a wrapper function for the following API: `Items - Create Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/create-eventhouse>`_.
20
+
19
21
  Parameters
20
22
  ----------
21
23
  name: str
@@ -51,6 +53,8 @@ def list_eventhouses(workspace: Optional[str] = None) -> pd.DataFrame:
51
53
  """
52
54
  Shows the eventhouses within a workspace.
53
55
 
56
+ This is a wrapper function for the following API: `Items - List Eventhouses <https://learn.microsoft.com/rest/api/fabric/environment/items/list-eventhouses>`_.
57
+
54
58
  Parameters
55
59
  ----------
56
60
  workspace : str, default=None
@@ -91,6 +95,8 @@ def delete_eventhouse(name: str, workspace: Optional[str] = None):
91
95
  """
92
96
  Deletes a Fabric eventhouse.
93
97
 
98
+ This is a wrapper function for the following API: `Items - Delete Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-eventhouse>`_.
99
+
94
100
  Parameters
95
101
  ----------
96
102
  name: str
@@ -14,6 +14,8 @@ def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
14
14
  """
15
15
  Shows the eventstreams within a workspace.
16
16
 
17
+ This is a wrapper function for the following API: `Items - List Eventstreams <https://learn.microsoft.com/rest/api/fabric/environment/items/list-eventstreams>`_.
18
+
17
19
  Parameters
18
20
  ----------
19
21
  workspace : str, default=None
@@ -56,6 +58,8 @@ def create_eventstream(
56
58
  """
57
59
  Creates a Fabric eventstream.
58
60
 
61
+ This is a wrapper function for the following API: `Items - Create Eventstream <https://learn.microsoft.com/rest/api/fabric/environment/items/create-eventstream>`_.
62
+
59
63
  Parameters
60
64
  ----------
61
65
  name: str
@@ -91,6 +95,8 @@ def delete_eventstream(name: str, workspace: Optional[str] = None):
91
95
  """
92
96
  Deletes a Fabric eventstream.
93
97
 
98
+ This is a wrapper function for the following API: `Items - Delete Eventstream <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-eventstream>`_.
99
+
94
100
  Parameters
95
101
  ----------
96
102
  name: str
@@ -0,0 +1,190 @@
1
+ import sempy.fabric as fabric
2
+ from uuid import UUID
3
+ import pandas as pd
4
+ from typing import Optional, List
5
+ import sempy_labs._icons as icons
6
+ from sempy_labs._helper_functions import (
7
+ resolve_workspace_name_and_id,
8
+ pagination,
9
+ )
10
+ from sempy.fabric.exceptions import FabricHTTPException
11
+
12
+
13
+ def create_external_data_share(
14
+ item_name: str,
15
+ item_type: str,
16
+ paths: str | List[str],
17
+ recipient: str,
18
+ workspace: Optional[str] = None,
19
+ ):
20
+ """
21
+ Creates an external data share for a given path or list of paths in the specified item.
22
+
23
+ This is a wrapper function for the following API: `External Data Shares - Create External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/create-external-data-share>`_.
24
+
25
+ Parameters
26
+ ----------
27
+ item_name : str
28
+ The item name.
29
+ item_type : str
30
+ The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
31
+ paths : str | List[str]
32
+ The path or list of paths that are to be externally shared. Currently, only a single path is supported.
33
+ recipient : str
34
+ The email address of the recipient.
35
+ workspace : str, default=None
36
+ The Fabric workspace name.
37
+ Defaults to None which resolves to the workspace of the attached lakehouse
38
+ or if no lakehouse attached, resolves to the workspace of the notebook.
39
+ """
40
+
41
+ # https://learn.microsoft.com/en-us/rest/api/fabric/core/external-data-shares/create-external-data-share?tabs=HTTP
42
+
43
+ (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
44
+ item_id = fabric.resolve_item_id(
45
+ item_name=item_name, type=item_type, workspace=workspace
46
+ )
47
+
48
+ if isinstance(paths, str):
49
+ paths = [paths]
50
+
51
+ payload = {"paths": paths, "recipient": {"userPrincipalName": recipient}}
52
+
53
+ client = fabric.FabricRestClient()
54
+ response = client.post(
55
+ f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
56
+ json=payload,
57
+ )
58
+
59
+ if response.status_code != 201:
60
+ raise FabricHTTPException(response)
61
+
62
+ print(
63
+ f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{workspace}' workspace for the {paths} paths."
64
+ )
65
+
66
+
67
+ def revoke_external_data_share(
68
+ external_data_share_id: UUID,
69
+ item_name: str,
70
+ item_type: str,
71
+ workspace: Optional[str] = None,
72
+ ):
73
+ """
74
+ Revokes the specified external data share. Note: This action cannot be undone.
75
+
76
+ This is a wrapper function for the following API: `External Data Shares - Revoke External Data Share <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/revoke-external-data-share>`_.
77
+
78
+ Parameters
79
+ ----------
80
+ external_data_share_id : UUID
81
+ The external data share ID.
82
+ item_name : str
83
+ The item name.
84
+ item_type : str
85
+ The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
86
+ workspace : str, default=None
87
+ The Fabric workspace name.
88
+ Defaults to None which resolves to the workspace of the attached lakehouse
89
+ or if no lakehouse attached, resolves to the workspace of the notebook.
90
+ """
91
+
92
+ (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
93
+ item_id = fabric.resolve_item_id(
94
+ item_name=item_name, type=item_type, workspace=workspace
95
+ )
96
+
97
+ client = fabric.FabricRestClient()
98
+ response = client.post(
99
+ f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke"
100
+ )
101
+
102
+ if response.status_code != 200:
103
+ raise FabricHTTPException(response)
104
+
105
+ print(
106
+ f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace}' workspace has been revoked."
107
+ )
108
+
109
+
110
+ def list_external_data_shares_in_item(
111
+ item_name: str, item_type: str, workspace: Optional[str] = None
112
+ ) -> pd.DataFrame:
113
+ """
114
+ Returns a list of the external data shares that exist for the specified item.
115
+
116
+ This is a wrapper function for the following API: `External Data Shares - List External Data Shares In Item <https://learn.microsoft.com/rest/api/fabric/core/external-data-shares/list-external-data-shares-in-item>`_.
117
+
118
+ Parameters
119
+ ----------
120
+ item_name : str
121
+ The item name.
122
+ item_type : str
123
+ The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
124
+ workspace : str, default=None
125
+ The Fabric workspace name.
126
+ Defaults to None which resolves to the workspace of the attached lakehouse
127
+ or if no lakehouse attached, resolves to the workspace of the notebook.
128
+
129
+ Returns
130
+ -------
131
+ pandas.DataFrame
132
+ A pandas dataframe showing a list of the external data shares that exist for the specified item.
133
+ """
134
+
135
+ (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
136
+ item_id = fabric.resolve_item_id(
137
+ item_name=item_name, type=item_type, workspace=workspace
138
+ )
139
+
140
+ client = fabric.FabricRestClient()
141
+ response = client.get(
142
+ f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares"
143
+ )
144
+
145
+ if response.status_code != 200:
146
+ raise FabricHTTPException(response)
147
+
148
+ df = pd.DataFrame(
149
+ columns=[
150
+ "External Data Share Id",
151
+ "Paths",
152
+ "Creator Principal Id",
153
+ "Creater Principal Type",
154
+ "Recipient User Principal Name",
155
+ "Status",
156
+ "Expiration Time UTC",
157
+ "Workspace Id",
158
+ "Item Id",
159
+ "Item Name",
160
+ "Item Type",
161
+ "Invitation URL",
162
+ ]
163
+ )
164
+
165
+ responses = pagination(client, response)
166
+ dfs = []
167
+
168
+ for r in responses:
169
+ for i in r.get("value", []):
170
+ item_id = i.get("itemId")
171
+ new_data = {
172
+ "External Data Share Id": i.get("id"),
173
+ "Paths": [i.get("paths")],
174
+ "Creator Principal Id": i.get("creatorPrincipal", {}).get("id"),
175
+ "Creator Principal Type": i.get("creatorPrincipal", {}).get("type"),
176
+ "Recipient User Principal Name": i.get("recipient", {}).get(
177
+ "userPrincipalName"
178
+ ),
179
+ "Status": i.get("status"),
180
+ "Expiration Time UTC": i.get("expriationTimeUtc"),
181
+ "Workspace Id": i.get("workspaceId"),
182
+ "Item Id": item_id,
183
+ "Item Name": item_name,
184
+ "Item Type": item_type,
185
+ "Invitation URL": i.get("invitationUrl"),
186
+ }
187
+ dfs.append(pd.DataFrame(new_data, index=[0]))
188
+ df = pd.concat(dfs, ignore_index=True)
189
+
190
+ return df
@@ -64,11 +64,28 @@ def create_blank_semantic_model(
64
64
  "name": '{dataset}',
65
65
  "compatibilityLevel": {compatibility_level},
66
66
  "model": {{
67
- "culture": "en-US",
68
- "defaultPowerBIDataSourceVersion": "powerBI_V3"
67
+ "cultures": [
68
+ {{
69
+ "name": "en-US",
70
+ "linguisticMetadata": {{
71
+ "content": {{
72
+ "Version": "1.0.0",
73
+ "Language": "en-US"
74
+ }},
75
+ "contentType": "json"
76
+ }}
77
+ }}
78
+ ],
79
+ "collation": "Latin1_General_100_BIN2_UTF8",
80
+ "dataAccessOptions": {{
81
+ "legacyRedirects": true,
82
+ "returnErrorValuesAsNull": true,
83
+ }},
84
+ "defaultPowerBIDataSourceVersion": "powerBI_V3",
85
+ "sourceQueryCulture": "en-US",
86
+ }}
69
87
  }}
70
88
  }}
71
- }}
72
89
  }}
73
90
  """
74
91
 
@@ -85,6 +102,8 @@ def create_semantic_model_from_bim(
85
102
  """
86
103
  Creates a new semantic model based on a Model.bim file.
87
104
 
105
+ This is a wrapper function for the following API: `Items - Create Semantic Model <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/create-semantic-model>`_.
106
+
88
107
  Parameters
89
108
  ----------
90
109
  dataset : str
@@ -149,6 +168,8 @@ def update_semantic_model_from_bim(
149
168
  """
150
169
  Updates a semantic model definition based on a Model.bim file.
151
170
 
171
+ This is a wrapper function for the following API: `Items - Update Semantic Model Definition <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/update-semantic-model-definition>`_.
172
+
152
173
  Parameters
153
174
  ----------
154
175
  dataset : str
@@ -267,7 +288,6 @@ def deploy_semantic_model(
267
288
  dataset=target_dataset,
268
289
  bim_file=bim,
269
290
  workspace=target_workspace,
270
- overwrite=overwrite,
271
291
  )
272
292
  # Update the semantic model if the model exists
273
293
  else:
@@ -288,6 +308,8 @@ def get_semantic_model_bim(
288
308
  """
289
309
  Extracts the Model.bim file for a given semantic model.
290
310
 
311
+ This is a wrapper function for the following API: `Items - Get Semantic Model Definition <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/get-semantic-model-definition>`_.
312
+
291
313
  Parameters
292
314
  ----------
293
315
  dataset : str
sempy_labs/_git.py CHANGED
@@ -21,6 +21,8 @@ def connect_workspace_to_git(
21
21
  """
22
22
  Connects a workspace to a git repository.
23
23
 
24
+ This is a wrapper function for the following API: `Git - Connect <https://learn.microsoft.com/rest/api/fabric/core/git/connect>`_.
25
+
24
26
  Parameters
25
27
  ----------
26
28
  organization_name : str
@@ -41,8 +43,6 @@ def connect_workspace_to_git(
41
43
  or if no lakehouse attached, resolves to the workspace of the notebook.
42
44
  """
43
45
 
44
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/connect?tabs=HTTP
45
-
46
46
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
47
47
 
48
48
  request_body = {
@@ -72,6 +72,8 @@ def disconnect_workspace_from_git(workspace: Optional[str] = None):
72
72
  """
73
73
  Disconnects a workspace from a git repository.
74
74
 
75
+ This is a wrapper function for the following API: `Git - Disconnect <https://learn.microsoft.com/rest/api/fabric/core/git/disconnect>`_.
76
+
75
77
  Parameters
76
78
  ----------
77
79
  workspace : str, default=None
@@ -80,8 +82,6 @@ def disconnect_workspace_from_git(workspace: Optional[str] = None):
80
82
  or if no lakehouse attached, resolves to the workspace of the notebook.
81
83
  """
82
84
 
83
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/disconnect?tabs=HTTP
84
-
85
85
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
86
86
 
87
87
  client = fabric.FabricRestClient()
@@ -98,6 +98,8 @@ def get_git_status(workspace: Optional[str] = None) -> pd.DataFrame:
98
98
  """
99
99
  Obtains the Git status of items in the workspace, that can be committed to Git.
100
100
 
101
+ This is a wrapper function for the following API: `Git - Get Status <https://learn.microsoft.com/rest/api/fabric/core/git/get-status>`_.
102
+
101
103
  Parameters
102
104
  ----------
103
105
  workspace : str, default=None
@@ -111,8 +113,6 @@ def get_git_status(workspace: Optional[str] = None) -> pd.DataFrame:
111
113
  A pandas dataframe showing the Git status of items in the workspace.
112
114
  """
113
115
 
114
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/get-status?tabs=HTTP
115
-
116
116
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
117
117
 
118
118
  df = pd.DataFrame(
@@ -161,6 +161,8 @@ def get_git_connection(workspace: Optional[str] = None) -> pd.DataFrame:
161
161
  """
162
162
  Obtains the Git connection details for the workspace.
163
163
 
164
+ This is a wrapper function for the following API: `Git - Get Connection <https://learn.microsoft.com/rest/api/fabric/core/git/get-connection>`_.
165
+
164
166
  Parameters
165
167
  ----------
166
168
  workspace : str, default=None
@@ -174,8 +176,6 @@ def get_git_connection(workspace: Optional[str] = None) -> pd.DataFrame:
174
176
  A pandas dataframe showing the Git connection details for the workspace.
175
177
  """
176
178
 
177
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/get-status?tabs=HTTP
178
-
179
179
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
180
180
 
181
181
  df = pd.DataFrame(
@@ -221,6 +221,8 @@ def initialize_git_connection(workspace: Optional[str] = None):
221
221
  """
222
222
  Initializes a connection for a workspace that is connected to Git.
223
223
 
224
+ This is a wrapper function for the following API: `Git - Initialize Connection <https://learn.microsoft.com/rest/api/fabric/core/git/initialize-connection>`_.
225
+
224
226
  Parameters
225
227
  ----------
226
228
  workspace : str, default=None
@@ -229,8 +231,6 @@ def initialize_git_connection(workspace: Optional[str] = None):
229
231
  or if no lakehouse attached, resolves to the workspace of the notebook.
230
232
  """
231
233
 
232
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/initialize-connection?tabs=HTTP
233
-
234
234
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
235
235
 
236
236
  client = fabric.FabricRestClient()
@@ -252,6 +252,8 @@ def commit_to_git(
252
252
  """
253
253
  Commits all or a selection of items within a workspace to Git.
254
254
 
255
+ This is a wrapper function for the following API: `Git - Commit to Git <https://learn.microsoft.com/rest/api/fabric/core/git/commit-to-git>`_.
256
+
255
257
  Parameters
256
258
  ----------
257
259
  comment : str
@@ -265,8 +267,6 @@ def commit_to_git(
265
267
  or if no lakehouse attached, resolves to the workspace of the notebook.
266
268
  """
267
269
 
268
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/commit-to-git?tabs=HTTP
269
-
270
270
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
271
271
 
272
272
  gs = get_git_status(workspace=workspace)
@@ -320,6 +320,8 @@ def update_from_git(
320
320
  """
321
321
  Updates the workspace with commits pushed to the connected branch.
322
322
 
323
+ This is a wrapper function for the following API: `Git - Update From Git <https://learn.microsoft.com/rest/api/fabric/core/git/update-from-git>`_.
324
+
323
325
  Parameters
324
326
  ----------
325
327
  workspace_head : str
@@ -336,8 +338,6 @@ def update_from_git(
336
338
  or if no lakehouse attached, resolves to the workspace of the notebook.
337
339
  """
338
340
 
339
- # https://learn.microsoft.com/en-us/rest/api/fabric/core/git/update-from-git?tabs=HTTP
340
-
341
341
  workspace, workspace_id = resolve_workspace_name_and_id(workspace)
342
342
 
343
343
  conflict_resolution_policies = ["PreferWorkspace", "PreferRemote"]
@@ -372,7 +372,7 @@ def update_from_git(
372
372
  if response.status_code not in [200, 202]:
373
373
  raise FabricHTTPException(response)
374
374
 
375
- lro(client, response)
375
+ lro(client, response, return_status_code=True)
376
376
 
377
377
  print(
378
378
  f"{icons.green_dot} The '{workspace}' workspace has been updated with commits pushed to the connected branch."