semantic-link-labs 0.11.1__py3-none-any.whl → 0.11.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/METADATA +7 -6
- semantic_link_labs-0.11.3.dist-info/RECORD +212 -0
- sempy_labs/__init__.py +65 -71
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_ai.py +1 -1
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +5 -5
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +2 -2
- sempy_labs/_dashboards.py +16 -16
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +101 -26
- sempy_labs/_dax.py +3 -3
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +22 -21
- sempy_labs/_eventhouses.py +12 -11
- sempy_labs/_eventstreams.py +12 -11
- sempy_labs/_external_data_shares.py +78 -23
- sempy_labs/_gateways.py +47 -45
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +12 -11
- sempy_labs/_helper_functions.py +169 -5
- sempy_labs/_job_scheduler.py +56 -54
- sempy_labs/_kql_databases.py +16 -17
- sempy_labs/_kql_querysets.py +12 -11
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_labels.py +126 -0
- sempy_labs/_list_functions.py +2 -2
- sempy_labs/_managed_private_endpoints.py +18 -15
- sempy_labs/_mirrored_databases.py +16 -15
- sempy_labs/_mirrored_warehouses.py +12 -11
- sempy_labs/_ml_experiments.py +11 -10
- sempy_labs/_model_auto_build.py +3 -3
- sempy_labs/_model_bpa.py +5 -5
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +12 -12
- sempy_labs/_notebooks.py +151 -2
- sempy_labs/_one_lake_integration.py +1 -1
- sempy_labs/_query_scale_out.py +1 -1
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +30 -28
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +1 -1
- sempy_labs/_sql_endpoints.py +12 -11
- sempy_labs/_sqldatabase.py +15 -15
- sempy_labs/_tags.py +11 -10
- sempy_labs/_translations.py +1 -1
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +15 -14
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +14 -13
- sempy_labs/admin/__init__.py +18 -18
- sempy_labs/admin/_activities.py +46 -46
- sempy_labs/admin/_apps.py +28 -26
- sempy_labs/admin/_artifacts.py +15 -15
- sempy_labs/admin/_basic_functions.py +1 -2
- sempy_labs/admin/_capacities.py +84 -82
- sempy_labs/admin/_dataflows.py +2 -2
- sempy_labs/admin/_datasets.py +50 -48
- sempy_labs/admin/_domains.py +25 -19
- sempy_labs/admin/_external_data_share.py +24 -22
- sempy_labs/admin/_git.py +17 -17
- sempy_labs/admin/_items.py +47 -45
- sempy_labs/admin/_reports.py +61 -58
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/admin/_shared.py +18 -18
- sempy_labs/admin/_tags.py +2 -2
- sempy_labs/admin/_tenant.py +57 -51
- sempy_labs/admin/_users.py +16 -15
- sempy_labs/admin/_workspaces.py +2 -2
- sempy_labs/directlake/__init__.py +12 -12
- sempy_labs/directlake/_directlake_schema_compare.py +3 -3
- sempy_labs/directlake/_directlake_schema_sync.py +9 -7
- sempy_labs/directlake/_dl_helper.py +5 -2
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
- sempy_labs/directlake/_guardrails.py +1 -1
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
- sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
- sempy_labs/directlake/_warm_cache.py +3 -3
- sempy_labs/graph/__init__.py +3 -3
- sempy_labs/graph/_groups.py +81 -78
- sempy_labs/graph/_teams.py +21 -21
- sempy_labs/graph/_users.py +109 -10
- sempy_labs/lakehouse/__init__.py +7 -7
- sempy_labs/lakehouse/_blobs.py +30 -30
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
- sempy_labs/lakehouse/_helper.py +38 -1
- sempy_labs/lakehouse/_lakehouse.py +16 -7
- sempy_labs/lakehouse/_livy_sessions.py +47 -42
- sempy_labs/lakehouse/_shortcuts.py +22 -21
- sempy_labs/migration/__init__.py +8 -8
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +35 -44
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +9 -20
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +5 -9
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +11 -20
- sempy_labs/migration/_migration_validation.py +1 -2
- sempy_labs/migration/_refresh_calc_tables.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/__init__.py +10 -10
- sempy_labs/report/_download_report.py +2 -2
- sempy_labs/report/_export_report.py +2 -2
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_paginated.py +1 -1
- sempy_labs/report/_report_bpa.py +4 -3
- sempy_labs/report/_report_functions.py +3 -3
- sempy_labs/report/_report_list_functions.py +3 -3
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/report/_reportwrapper.py +248 -250
- sempy_labs/report/_save_report.py +3 -3
- sempy_labs/theme/_org_themes.py +19 -6
- sempy_labs/tom/__init__.py +1 -1
- sempy_labs/tom/_model.py +13 -8
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- semantic_link_labs-0.11.1.dist-info/RECORD +0 -210
- sempy_labs/_dax_query_view.py +0 -57
- sempy_labs/_ml_models.py +0 -110
- sempy_labs/_variable_libraries.py +0 -91
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/top_level.txt +0 -0
sempy_labs/_mounted_data_factories.py CHANGED

@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
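Nearly every touched module in this release swaps its absolute self-imports (from sempy_labs._helper_functions import ...) for package-relative ones (from ._helper_functions import ...), as in the hunk above. A runnable sketch of the difference, using a throwaway package; demo_pkg and its contents are hypothetical and stand in for the real sempy_labs tree:

import os
import sys
import tempfile

# Build a tiny package on disk so the relative import can actually execute.
root = tempfile.mkdtemp()
pkg = os.path.join(root, "demo_pkg")
os.makedirs(pkg)

open(os.path.join(pkg, "__init__.py"), "w").close()
with open(os.path.join(pkg, "_helper_functions.py"), "w") as f:
    f.write("def resolve_workspace_id(ws=None):\n    return ws or 'default'\n")
with open(os.path.join(pkg, "_consumer.py"), "w") as f:
    # The relative form resolves against the containing package rather than
    # sys.path, so it keeps working if the package is renamed or vendored.
    f.write("from ._helper_functions import resolve_workspace_id\n")

sys.path.insert(0, root)
from demo_pkg._consumer import resolve_workspace_id
print(resolve_workspace_id())  # -> 'default'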
@@ -49,19 +49,19 @@ def list_mounted_data_factories(
         uses_pagination=True,
     )
 
-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            new_data = {
-                "Mounted Data Factory Name": v.get("displayName"),
-                "Mounted Data Factory Id": v.get("id"),
-                "Description": v.get("description"),
-            }
-
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "Mounted Data Factory Name": v.get("displayName"),
+                    "Mounted Data Factory Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
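The other change repeated across this release (in the hunk above, and again in the _semantic_models, _sql_endpoints, _sqldatabase, _tags, _warehouses, and _workspaces hunks below) replaces one single-row DataFrame per API record plus a final pd.concat with plain dicts collected into a list and a single DataFrame built at the end. A self-contained before/after sketch with made-up records; the motivation suggested here (less per-row overhead, no concat over single-row frames) is inferred from the diff, not stated in release notes:

import pandas as pd

records = [
    {"displayName": "DF1", "id": "111", "description": "first"},
    {"displayName": "DF2", "id": "222", "description": None},
]
columns = {"Name": "string", "Id": "string", "Description": "string"}

# Old pattern (0.11.1): one single-row frame per record, concatenated at the end.
dfs = []
for v in records:
    new_data = {
        "Name": v.get("displayName"),
        "Id": v.get("id"),
        "Description": v.get("description"),
    }
    dfs.append(pd.DataFrame(new_data, index=[0]))
old_df = pd.concat(dfs, ignore_index=True)

# New pattern (0.11.3): accumulate plain dicts, construct the frame once.
rows = []
for v in records:
    rows.append(
        {
            "Name": v.get("displayName"),
            "Id": v.get("id"),
            "Description": v.get("description"),
        }
    )
new_df = pd.DataFrame(rows, columns=list(columns.keys()))

print(old_df.equals(new_df))  # True: same shape, column order, and values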
sempy_labs/_notebooks.py CHANGED

@@ -1,7 +1,7 @@
 import sempy.fabric as fabric
 import pandas as pd
 import sempy_labs._icons as icons
-from typing import Optional
+from typing import Optional, List
 import base64
 import requests
 from sempy._utils._log import log
@@ -12,9 +12,10 @@ from sempy_labs._helper_functions import (
     _base_api,
     resolve_item_id,
     create_item,
+    _create_dataframe,
 )
 from sempy.fabric.exceptions import FabricHTTPException
-import os
+from os import PathLike
 from uuid import UUID
 
 _notebook_prefix = "notebook-content."
@@ -114,6 +115,7 @@ def import_notebook_from_web(
     description: Optional[str] = None,
     workspace: Optional[str | UUID] = None,
     overwrite: bool = False,
+    folder: Optional[str | PathLike] = None,
 ):
     """
     Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web.
@@ -136,6 +138,9 @@
         or if no lakehouse attached, resolves to the workspace of the notebook.
     overwrite : bool, default=False
         If set to True, overwrites the existing notebook in the workspace if it exists.
+    folder : str | os.PathLike, default=None
+        The folder within the workspace where the notebook will be created.
+        Defaults to None which places the notebook in the root of the workspace.
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -161,6 +166,7 @@
             workspace=workspace_id,
             description=description,
             format="ipynb",
+            folder=folder,
         )
     elif len(dfI_filt) > 0 and overwrite:
         print(f"{icons.info} Overwrite of notebooks is currently not supported.")
@@ -181,6 +187,7 @@ def create_notebook(
     description: Optional[str] = None,
     workspace: Optional[str | UUID] = None,
     format: Optional[str] = None,
+    folder: Optional[str | PathLike] = None,
 ):
     """
     Creates a new notebook with a definition within a workspace.
@@ -203,6 +210,9 @@
     format : str, default=None
         If 'ipynb' is provided then notebook_content should be standard ipynb format
         otherwise notebook_content should be GIT friendly format
+    folder : str | os.PathLike, default=None
+        The folder within the workspace where the notebook will be created.
+        Defaults to None which places the notebook in the root of the workspace.
     """
 
     notebook_payload = base64.b64encode(notebook_content).decode("utf-8")
@@ -226,6 +236,7 @@
         workspace=workspace,
         description=description,
         definition=definition_payload,
+        folder=folder,
     )
 
 
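Both import_notebook_from_web and create_notebook now accept a folder argument that is forwarded down to the item-creation call, so a notebook can land in a workspace subfolder instead of the workspace root. A hedged usage sketch; the leading parameters of import_notebook_from_web are not visible in these hunks, so the notebook_name and url names below are assumptions:

import sempy_labs as labs

labs.import_notebook_from_web(
    notebook_name="My Imported Notebook",  # assumed parameter name (not shown in the hunks)
    url="https://raw.githubusercontent.com/example/repo/main/nb.ipynb",
    workspace="Sales Workspace",
    folder="Imported/From GitHub",  # new in 0.11.3: target subfolder in the workspace
)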
@@ -287,3 +298,141 @@ def update_notebook_definition(
     print(
         f"{icons.green_dot} The '{name}' notebook was updated within the '{workspace_name}' workspace."
     )
+
+
+@log
+def list_notebooks(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows the notebooks within a workspace.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the notebooks within a workspace.
+    """
+
+    columns = {
+        "Notebook Id": "string",
+        "Notebook Name": "string",
+        "Description": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    workspace_id = resolve_workspace_id(workspace)
+
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/notebooks", uses_pagination=True
+    )
+
+    rows = []
+    for r in responses:
+        for v in r.get("value", []):
+            rows.append(
+                {
+                    "Notebook Id": v.get("id"),
+                    "Notebook Name": v.get("displayName"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
+
+
+@log
+def search_notebooks(
+    search_string: str,
+    notebook: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID | List[str | UUID]] = None,
+) -> pd.DataFrame:
+    """
+    Searches notebooks within a workspace or across multiple workspaces for a given search string.
+
+    Parameters
+    ----------
+    search_string : str
+        The string to search for within the notebook definitions.
+    notebook : str | uuid.UUID, default=None
+        The name or ID of a specific notebook to search within.
+        Defaults to None which searches across all notebooks in the specified workspace(s).
+    workspace : str | uuid.UUID | list, default=None
+        The name or ID of the workspace or a list of workspaces to search within.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+        If a list is provided, it should contain workspace names or IDs.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the notebooks that contain the search string in their definitions.
+        The dataframe includes the workspace name, workspace ID, notebook name, and notebook ID.
+    """
+
+    if not workspace:
+        workspace_id = resolve_workspace_id(workspace)
+        workspace_ids = [workspace_id]
+    elif isinstance(workspace, str):
+        workspace_id = resolve_workspace_id(workspace)
+        workspace_ids = [workspace_id]
+    elif isinstance(workspace, list):
+        workspace_ids = [resolve_workspace_id(ws) for ws in workspace]
+    else:
+        raise ValueError(
+            "Workspace must be a string, UUID, or a list of strings/UUIDs."
+        )
+
+    dfW = fabric.list_workspaces()
+    dfW_filt = dfW[dfW["Id"].isin(workspace_ids)]
+
+    columns = {
+        "Workspace Name": "string",
+        "Workspace Id": "string",
+        "Notebook Name": "string",
+        "Notebook Id": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    rows = []
+    for _, r in dfW_filt.iterrows():
+        w_id = r["Id"]
+        w_name = r["Name"]
+        dfN = list_notebooks(workspace=w_id)
+        if notebook is not None:
+            item_id = resolve_item_id(item=notebook, type="Notebook", workspace=w_id)
+            dfN = dfN[dfN["Notebook Id"] == item_id]
+        for _, n in dfN.iterrows():
+            notebook_id = n["Notebook Id"]
+            notebook_name = n["Notebook Name"]
+            definition = _base_api(
+                request=f"v1/workspaces/{w_id}/notebooks/{notebook_id}/getDefinition",
+                method="post",
+                client="fabric_sp",
+                status_codes=None,
+                lro_return_json=True,
+            )
+            for part in definition.get("definition").get("parts"):
+                payload = _decode_b64(part["payload"])
+                if part["path"] == "notebook-content.py":
+                    if search_string in payload:
+                        rows.append(
+                            {
+                                "Workspace Name": w_name,
+                                "Workspace Id": w_id,
+                                "Notebook Name": notebook_name,
+                                "Notebook Id": notebook_id,
+                            }
+                        )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
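A short usage sketch for the two functions added above. The signatures are taken from the diff; the assumption is that both are re-exported from the sempy_labs package root like the module's other helpers. Note that search_notebooks only inspects the notebook-content.py part of each definition, so matches inside other definition parts are not reported:

import sempy_labs as labs  # assumes root-level re-export of the new functions

# Enumerate notebooks in one workspace.
df = labs.list_notebooks(workspace="Sales Workspace")
print(df[["Notebook Name", "Notebook Id"]])

# Find which notebooks reference a table name, across several workspaces.
hits = labs.search_notebooks(
    search_string="FactInternetSales",
    workspace=["Sales Workspace", "Finance Workspace"],
)
print(hits)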
sempy_labs/_one_lake_integration.py CHANGED

@@ -2,7 +2,7 @@ import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy._utils._log import log
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     resolve_workspace_id,
sempy_labs/_query_scale_out.py CHANGED

sempy_labs/_semantic_models.py CHANGED

@@ -1,7 +1,7 @@
 from uuid import UUID
 from typing import Optional, List
 import pandas as pd
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _create_dataframe,
     _base_api,
     _update_dataframe_datatypes,
@@ -298,39 +298,41 @@ def list_semantic_model_datasources(
         client="fabric_sp",
     )
 
-    dfs = []
+    rows = []
     for item in response.json().get("value", []):
         ds_type = item.get("datasourceType")
         conn_details = item.get("connectionDetails", {})
         ds_id = item.get("datasourceId")
         gateway_id = item.get("gatewayId")
         if expand_details:
-            new_data = {
-                "Datasource Type": ds_type,
-                "Connection Server": conn_details.get("server"),
-                "Connection Database": conn_details.get("database"),
-                "Connection Path": conn_details.get("path"),
-                "Connection Account": conn_details.get("account"),
-                "Connection Domain": conn_details.get("domain"),
-                "Connection Kind": conn_details.get("kind"),
-                "Connection Email Address": conn_details.get("emailAddress"),
-                "Connection URL": conn_details.get("url"),
-                "Connection Class Info": conn_details.get("classInfo"),
-                "Connection Login Server": conn_details.get("loginServer"),
-                "Datasource Id": ds_id,
-                "Gateway Id": gateway_id,
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
+            rows.append(
+                {
+                    "Datasource Type": ds_type,
+                    "Connection Server": conn_details.get("server"),
+                    "Connection Database": conn_details.get("database"),
+                    "Connection Path": conn_details.get("path"),
+                    "Connection Account": conn_details.get("account"),
+                    "Connection Domain": conn_details.get("domain"),
+                    "Connection Kind": conn_details.get("kind"),
+                    "Connection Email Address": conn_details.get("emailAddress"),
+                    "Connection URL": conn_details.get("url"),
+                    "Connection Class Info": conn_details.get("classInfo"),
+                    "Connection Login Server": conn_details.get("loginServer"),
+                    "Datasource Id": ds_id,
+                    "Gateway Id": gateway_id,
+                }
+            )
         else:
-            new_data = {
-                "Datasource Type": ds_type,
-                "Connection Details": conn_details,
-                "Datasource Id": ds_id,
-                "Gateway Id": gateway_id,
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "Datasource Type": ds_type,
+                    "Connection Details": conn_details,
+                    "Datasource Id": ds_id,
+                    "Gateway Id": gateway_id,
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
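A hedged sketch of the reworked list_semantic_model_datasources. Only the function name and loop body are visible in this hunk, so the dataset and workspace parameter names below are assumptions; expand_details and the output columns come straight from the code above:

import sempy_labs as labs

ds = labs.list_semantic_model_datasources(
    dataset="AdventureWorks",      # assumed parameter name
    workspace="Sales Workspace",   # assumed parameter name
    expand_details=True,           # one flat column per connection property
)
print(ds[["Datasource Type", "Connection Server", "Gateway Id"]])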
sempy_labs/_spark.py CHANGED

sempy_labs/_sql.py CHANGED

@@ -3,7 +3,7 @@ from typing import Optional, Union, List
 from sempy._utils._log import log
 import struct
 from itertools import chain, repeat
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_lakehouse_name_and_id,
     resolve_item_name_and_id,
     resolve_workspace_name_and_id,
sempy_labs/_sql_endpoints.py CHANGED

@@ -1,7 +1,7 @@
 from typing import Optional, Literal
 from uuid import UUID
 import pandas as pd
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _base_api,
     _create_dataframe,
     resolve_workspace_name_and_id,
@@ -44,19 +44,19 @@ def list_sql_endpoints(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         request=f"/v1/workspaces/{workspace_id}/sqlEndpoints", uses_pagination=True
     )
 
-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            new_data = {
-                "SQL Endpoint Id": v.get("id"),
-                "SQL Endpoint Name": v.get("displayName"),
-                "Description": v.get("description"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
+            rows.append(
+                {
+                    "SQL Endpoint Id": v.get("id"),
+                    "SQL Endpoint Name": v.get("displayName"),
+                    "Description": v.get("description"),
+                }
+            )
 
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
 
@@ -142,6 +142,7 @@ def refresh_sql_endpoint_metadata(
     result = _base_api(
         request=f"v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata?preview=true",
         method="post",
+        client="fabric_sp",
        status_codes=[200, 202],
         lro_return_json=True,
         payload=payload,
sempy_labs/_sqldatabase.py CHANGED

@@ -1,4 +1,4 @@
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -100,23 +100,23 @@ def list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )
 
-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})
-
-            new_data = {
-                "SQL Database Name": v.get("displayName"),
-                "SQL Database Id": v.get("id"),
-                "Description": v.get("description"),
-                "Connection Info": prop.get("connectionInfo"),
-                "Database Name": prop.get("databaseName"),
-                "Server FQDN": prop.get("serverFqdn"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "SQL Database Name": v.get("displayName"),
+                    "SQL Database Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "Connection Info": prop.get("connectionInfo"),
+                    "Database Name": prop.get("databaseName"),
+                    "Server FQDN": prop.get("serverFqdn"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
sempy_labs/_tags.py CHANGED

@@ -1,4 +1,4 @@
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _base_api,
     _create_dataframe,
     _update_dataframe_datatypes,
@@ -40,17 +40,18 @@ def list_tags() -> pd.DataFrame:
         client="fabric_sp",
     )
 
-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            new_data = {
-                "Tag Name": v.get("displayName"),
-                "Tag Id": v.get("id"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "Tag Name": v.get("displayName"),
+                    "Tag Id": v.get("id"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
sempy_labs/_translations.py CHANGED

sempy_labs/_vertipaq.py CHANGED

@@ -6,7 +6,7 @@ import os
 import shutil
 import datetime
 import warnings
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     format_dax_object_name,
     save_as_delta_table,
     resolve_workspace_capacity,
@@ -17,8 +17,8 @@ from sempy_labs._helper_functions import (
     resolve_workspace_id,
     resolve_workspace_name,
 )
-from sempy_labs._list_functions import list_relationships, list_tables
-from sempy_labs.lakehouse import lakehouse_attached, get_lakehouse_tables
+from ._list_functions import list_relationships, list_tables
+from .lakehouse import lakehouse_attached, get_lakehouse_tables
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
sempy_labs/_vpax.py CHANGED

@@ -6,7 +6,7 @@ import sys
 from pathlib import Path
 from typing import Optional
 from uuid import UUID
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     resolve_lakehouse_name_and_id,
sempy_labs/_warehouses.py CHANGED

@@ -1,4 +1,4 @@
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
@@ -111,23 +111,24 @@ def list_warehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )
 
-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             prop = v.get("properties", {})
 
-            new_data = {
-                "Warehouse Name": v.get("displayName"),
-                "Warehouse Id": v.get("id"),
-                "Description": v.get("description"),
-                "Connection Info": prop.get("connectionInfo"),
-                "Created Date": prop.get("createdDate"),
-                "Last Updated Time": prop.get("lastUpdatedTime"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "Warehouse Name": v.get("displayName"),
+                    "Warehouse Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "Connection Info": prop.get("connectionInfo"),
+                    "Created Date": prop.get("createdDate"),
+                    "Last Updated Time": prop.get("lastUpdatedTime"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
sempy_labs/_workloads.py CHANGED

sempy_labs/_workspaces.py CHANGED

@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_capacity_id,
     _base_api,
@@ -144,21 +144,22 @@ def list_workspace_users(workspace: Optional[str | UUID] = None) -> pd.DataFrame
         client="fabric_sp",
     )
 
-    dfs = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             p = v.get("principal", {})
-            new_data = {
-                "User Name": p.get("displayName"),
-                "User ID": p.get("id"),
-                "Type": p.get("type"),
-                "Role": v.get("role"),
-                "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
-            }
-            dfs.append(pd.DataFrame(new_data, index=[0]))
-
-    if dfs:
-        df = pd.concat(dfs, ignore_index=True)
+            rows.append(
+                {
+                    "User Name": p.get("displayName"),
+                    "User ID": p.get("id"),
+                    "Type": p.get("type"),
+                    "Role": v.get("role"),
+                    "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
 
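A closing usage sketch for list_workspace_users, whose full signature appears in the hunk header above; it assumes the function is re-exported from the package root and needs a live Fabric session, so treat it as illustrative:

import sempy_labs as labs

users = labs.list_workspace_users(workspace="Sales Workspace")
# Columns per the diff: User Name, User ID, Type, Role, Email Address.
admins = users[users["Role"] == "Admin"]
print(admins[["User Name", "Email Address"]])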