semantic-link-labs 0.11.3 → 0.12.0 (py3-none-any.whl)

This diff shows the content of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.

This release of semantic-link-labs has been flagged as potentially problematic.

Files changed (77):
  1. {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.0.dist-info}/METADATA +5 -4
  2. {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.0.dist-info}/RECORD +77 -73
  3. sempy_labs/__init__.py +6 -0
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_authentication.py +81 -32
  6. sempy_labs/_capacities.py +2 -2
  7. sempy_labs/_capacity_migration.py +4 -4
  8. sempy_labs/_clear_cache.py +1 -1
  9. sempy_labs/_connections.py +107 -70
  10. sempy_labs/_dashboards.py +6 -2
  11. sempy_labs/_data_pipelines.py +1 -1
  12. sempy_labs/_dataflows.py +1 -1
  13. sempy_labs/_dax.py +3 -3
  14. sempy_labs/_delta_analyzer.py +4 -4
  15. sempy_labs/_delta_analyzer_history.py +1 -1
  16. sempy_labs/_deployment_pipelines.py +1 -1
  17. sempy_labs/_environments.py +1 -1
  18. sempy_labs/_eventhouses.py +9 -3
  19. sempy_labs/_eventstreams.py +1 -1
  20. sempy_labs/_external_data_shares.py +1 -1
  21. sempy_labs/_gateways.py +14 -7
  22. sempy_labs/_generate_semantic_model.py +7 -12
  23. sempy_labs/_git.py +1 -1
  24. sempy_labs/_graphQL.py +1 -1
  25. sempy_labs/_helper_functions.py +161 -54
  26. sempy_labs/_job_scheduler.py +12 -1
  27. sempy_labs/_kql_databases.py +1 -1
  28. sempy_labs/_kql_querysets.py +10 -2
  29. sempy_labs/_kusto.py +2 -2
  30. sempy_labs/_list_functions.py +1 -1
  31. sempy_labs/_managed_private_endpoints.py +1 -1
  32. sempy_labs/_mirrored_databases.py +40 -16
  33. sempy_labs/_mirrored_warehouses.py +1 -1
  34. sempy_labs/_ml_experiments.py +1 -1
  35. sempy_labs/_model_bpa.py +6 -6
  36. sempy_labs/_model_bpa_bulk.py +3 -3
  37. sempy_labs/_model_dependencies.py +1 -1
  38. sempy_labs/_mounted_data_factories.py +3 -3
  39. sempy_labs/_notebooks.py +2 -1
  40. sempy_labs/_query_scale_out.py +2 -2
  41. sempy_labs/_refresh_semantic_model.py +1 -1
  42. sempy_labs/_semantic_models.py +15 -3
  43. sempy_labs/_spark.py +1 -1
  44. sempy_labs/_sql.py +3 -3
  45. sempy_labs/_sql_endpoints.py +5 -3
  46. sempy_labs/_sqldatabase.py +5 -1
  47. sempy_labs/_tags.py +3 -1
  48. sempy_labs/_translations.py +7 -360
  49. sempy_labs/_user_delegation_key.py +2 -2
  50. sempy_labs/_utils.py +27 -0
  51. sempy_labs/_vertipaq.py +3 -3
  52. sempy_labs/_vpax.py +1 -1
  53. sempy_labs/_warehouses.py +5 -0
  54. sempy_labs/_workloads.py +1 -1
  55. sempy_labs/_workspace_identity.py +1 -1
  56. sempy_labs/_workspaces.py +145 -11
  57. sempy_labs/admin/__init__.py +6 -0
  58. sempy_labs/admin/_capacities.py +34 -11
  59. sempy_labs/admin/_items.py +2 -2
  60. sempy_labs/admin/_tenant_keys.py +89 -0
  61. sempy_labs/directlake/_dl_helper.py +1 -1
  62. sempy_labs/lakehouse/__init__.py +4 -0
  63. sempy_labs/lakehouse/_materialized_lake_views.py +76 -0
  64. sempy_labs/lakehouse/_shortcuts.py +8 -2
  65. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  66. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  67. sempy_labs/report/_download_report.py +4 -1
  68. sempy_labs/report/_export_report.py +12 -5
  69. sempy_labs/report/_generate_report.py +11 -3
  70. sempy_labs/report/_paginated.py +21 -15
  71. sempy_labs/report/_report_functions.py +19 -11
  72. sempy_labs/report/_report_rebind.py +21 -10
  73. sempy_labs/theme/_org_themes.py +5 -6
  74. sempy_labs/tom/_model.py +5 -16
  75. {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.0.dist-info}/WHEEL +0 -0
  76. {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.0.dist-info}/licenses/LICENSE +0 -0
  77. {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.0.dist-info}/top_level.txt +0 -0
sempy_labs/_helper_functions.py CHANGED
@@ -21,6 +21,7 @@ from jsonpath_ng.ext import parse
 from jsonpath_ng.jsonpath import Fields, Index
 from sempy._utils._log import log
 from os import PathLike
+import sempy_labs._utils as utils
 
 
 def _build_url(url: str, params: dict) -> str:
@@ -226,8 +227,6 @@ def delete_item(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    from sempy_labs._utils import item_types
-
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (item_name, item_id) = resolve_item_name_and_id(item, type, workspace_id)
     item_type = item_types.get(type)[0].lower()
@@ -281,11 +280,10 @@ def create_item(
         The folder within the workspace where the item will be created.
         Defaults to None which places the item in the root of the workspace.
     """
-    from sempy_labs._utils import item_types
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_type = item_types.get(type)[0].lower()
-    item_type_url = item_types.get(type)[1]
+    item_type = utils.item_types.get(type)[0].lower()
+    item_type_url = utils.item_types.get(type)[1]
 
     payload = {
         "displayName": name,
@@ -319,10 +317,13 @@ def copy_item(
     source_workspace: Optional[str | UUID] = None,
     target_workspace: Optional[str | UUID] = None,
     overwrite: bool = False,
+    keep_existing_bindings: bool = False,
 ):
     """
     Copies an item (with its definition) from one location to another location.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item : str | uuid.UUID
@@ -339,40 +340,14 @@ def copy_item(
         The workspace name or ID to which the item will be copied.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    overwrite: bool, default=False
+    overwrite : bool, default=False
         If True, overwrites the item in the target workspace if it already exists.
+    keep_existing_bindings : bool, default=False
+        If True, ensures that reports are re-bound to the original semantic model.
+        If False, reports are binded to the semantic model to which the item is bound.
     """
 
-    items = {
-        "CopyJob": "copyJobs",
-        "Dataflow": "dataflows",
-        "Eventhouse": "eventhouses",
-        "GraphQLApi": "GraphQLApis",
-        "Report": "reports",
-        "SemanticModel": "semanticModels",
-        # "Environment": "environments",
-        "KQLDatabase": "kqlDatabases",
-        "KQLDashboard": "kqlDashboards",
-        "KQLQueryset": "kqlQuerysets",
-        "DataPipeline": "dataPipelines",
-        "Notebook": "notebooks",
-        "SparkJobDefinition": "sparkJobDefinitions",
-        "Eventstream": "eventstreams",
-        "MirroredWarehouse": "mirroredWarehouses",
-        "MirroredDatabase": "mirroredDatabases",
-        "MountedDataFactory": "mountedDataFactories",
-        "VariableLibrary": "variableLibraries",
-        "ApacheAirFlowJob": "ApacheAirflowJobs",
-        "WarehouseSnapshot": "warehousesnapshots",
-        "DigitalTwinBuilder": "digitaltwinbuilders",
-        "DigitalTwinBuilderFlow": "DigitalTwinBuilderFlows",
-        "MirroredAzureDatabricksCatalog": "mirroredAzureDatabricksCatalogs",
-    }
-    if type not in items:
-        raise ValueError(
-            f"{icons.red_dot} The '{type}' item type does not have a definition and cannot be copied."
-        )
-    type_url = items.get(type)
+    from sempy_labs.report import report_rebind
 
     (item_name, item_id) = resolve_item_name_and_id(
         item=item, type=type, workspace=source_workspace
@@ -392,18 +367,19 @@ def copy_item(
             f"{icons.red_dot} The source and target workspaces are the same and the target name is the same as the source name. No action taken."
         )
 
+    type_url = utils.items.get(type)
    result = _base_api(
        request=f"v1/workspaces/{source_workspace_id}/{type_url}/{item_id}",
        client="fabric_sp",
    )
    description = result.json().get("description")
 
-    payload = _base_api(
-        request=f"v1/workspaces/{source_workspace_id}/{type_url}/{item_id}/getDefinition",
-        method="post",
-        client="fabric_sp",
-        status_codes=None,
-        lro_return_json=True,
+    payload = get_item_definition(
+        item=item_id,
+        type=type,
+        workspace=source_workspace_id,
+        return_dataframe=False,
+        decode=False,
    )
    payload["displayName"] = target_name
    if description:
@@ -428,6 +404,13 @@ def copy_item(
         print(
             f"{icons.in_progress} Updating existing item '{target_name}' of type '{type}' in the target workspace '{target_workspace_name}'..."
         )
+        # Get the existing source model
+        if type == "Report" and keep_existing_bindings:
+            result = _base_api(
+                request=f"v1.0/myorg/groups/{target_workspace_id}/reports/{target_item_id}"
+            ).json()
+            dataset_id = result.get("datasetId")
+            dataset_workspace_id = result.get("datasetWorkspaceId")
         _base_api(
             request=f"/v1/workspaces/{target_workspace_id}/{type_url}/{target_item_id}/updateDefinition",
             method="post",
@@ -439,6 +422,15 @@ def copy_item(
         print(
             f"{icons.green_dot} The item '{target_name}' of type '{type}' has been successfully updated in the target workspace '{target_workspace_name}'."
         )
+
+        if keep_existing_bindings:
+            report_rebind(
+                report=target_item_id,
+                dataset=dataset_id,
+                report_workspace=target_workspace,
+                dataset_workspace=dataset_workspace_id,
+            )
+
     else:
         print(
             f"{icons.in_progress} Creating new item '{target_name}' of type '{type}' in the target workspace '{target_workspace_name}'..."
@@ -451,8 +443,115 @@ def copy_item(
         )
 
 
+@log
+def is_base64(s):
+    try:
+        # Add padding if needed
+        s_padded = s + "=" * (-len(s) % 4)
+        decoded = base64.b64decode(s_padded, validate=True)
+        # Optional: check if re-encoding gives the original (excluding padding)
+        return base64.b64encode(decoded).decode().rstrip("=") == s.rstrip("=")
+    except Exception:
+        return False
+
+
+@log
+def decode_payload(payload):
+
+    if is_base64(payload):
+        try:
+            decoded_payload = json.loads(base64.b64decode(payload).decode("utf-8"))
+        except Exception:
+            decoded_payload = base64.b64decode(payload)
+    elif isinstance(payload, dict):
+        decoded_payload = payload
+    else:
+        raise ValueError("Payload must be a dictionary or a base64 encoded value.")
+
+    return decoded_payload
+
+
 @log
 def get_item_definition(
+    item: str | UUID,
+    type: str,
+    workspace: Optional[str | UUID] = None,
+    return_dataframe: bool = False,
+    decode: bool = True,
+    format: Optional[str] = None,
+) -> dict | pd.DataFrame:
+    """
+    Gets a Fabric item's defintion.
+
+    This is a wrapper function for the following API: `<https://learn.microsoft.com/rest/api/fabric/core/items/get-item-definition>`_.
+
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+    Parameters
+    ----------
+    item : str | uuid.UUID
+        The name or ID of the item to be copied.
+    type : str
+        The `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_ of the item.
+    target_name: str, default=None
+        The name of the item in the target workspace. Defaults to the same name as the source item.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    return_dataframe : bool, default=False
+        If True, returns a pandas dataframe.
+        If False, returns a dictionary.
+    decode : bool, default=True
+        If True, decodes the base64 payload.
+    format : str, default=None
+        The `format <https://learn.microsoft.com/rest/api/fabric/core/items/get-item-definition?tabs=HTTP#itemdefinition>`_ of the item definition.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=item, type=type, workspace=workspace_id)
+
+    item_type_url = utils.items.get(type)
+    if not item_type_url:
+        raise ValueError(f"{icons.red_dot} Invalid item type '{type}'.")
+
+    url = f"/v1/workspaces/{workspace_id}/{item_type_url}/{item_id}/getDefinition"
+    if format:
+        url += f"?format={format}"
+
+    result = _base_api(
+        request=url,
+        method="post",
+        status_codes=None,
+        lro_return_json=True,
+    )
+
+    if return_dataframe:
+        return pd.json_normalize(result["definition"]["parts"]).rename(
+            columns={
+                "path": "Path",
+                "payload": "Payload",
+                "payloadType": "Payload Type",
+            }
+        )
+
+    definition = {"definition": {"parts": []}}
+    if decode:
+        for part in result.get("definition", {}).get("parts", []):
+            path = part.get("path")
+            payload = part.get("payload")
+            decoded_payload = decode_payload(payload)
+
+            # Keep structure similar to original but replace payload with decoded version
+            definition["definition"]["parts"].append(
+                {"path": path, "payload": decoded_payload}
+            )
+    else:
+        return result
+
+
+@log
+def _get_item_definition(
     item: str | UUID,
     type: str,
     workspace: Optional[str | UUID] = None,
@@ -460,12 +559,11 @@ def get_item_definition(
     return_dataframe: bool = True,
     decode: bool = True,
 ):
-    from sempy_labs._utils import item_types
 
     workspace_id = resolve_workspace_id(workspace)
     item_id = resolve_item_id(item, type, workspace_id)
-    item_type_url = item_types.get(type)[1]
-    path = item_types.get(type)[2]
+    item_type_url = utils.item_types.get(type)[1]
+    path = utils.item_types.get(type)[2]
 
     url = f"/v1/workspaces/{workspace_id}/{item_type_url}/{item_id}/getDefinition"
     if format:
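A usage sketch for the new public get_item_definition (item and workspace names are placeholders, and root-level re-export is assumed, as with the other helpers). return_dataframe=True yields the Path / Payload / Payload Type frame built above; decode=False returns the raw, still-encoded response:

    import sempy_labs as labs

    # Definition parts as a dataframe (Path / Payload / Payload Type columns)
    df = labs.get_item_definition(
        item="My Model",          # placeholder item name
        type="SemanticModel",
        workspace="Dev",          # placeholder workspace name
        return_dataframe=True,
    )

    # Raw, still-encoded definition exactly as the REST API returns it
    raw = labs.get_item_definition(
        item="My Model",
        type="SemanticModel",
        workspace="Dev",
        decode=False,
    )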
@@ -986,7 +1084,9 @@ def resolve_workspace_id(
 
 
 @log
-def resolve_workspace_name(workspace_id: Optional[UUID] = None) -> str:
+def resolve_workspace_name(
+    workspace_id: Optional[UUID] = None, throw_error: bool = True
+) -> str:
 
     if workspace_id is None:
         workspace_id = _get_fabric_context_setting(name="trident.workspace.id")
@@ -996,9 +1096,12 @@ def resolve_workspace_name(workspace_id: Optional[UUID] = None) -> str:
             request=f"/v1/workspaces/{workspace_id}", client="fabric_sp"
         ).json()
     except FabricHTTPException:
-        raise ValueError(
-            f"{icons.red_dot} The '{workspace_id}' workspace was not found."
-        )
+        if throw_error:
+            raise ValueError(
+                f"{icons.red_dot} The '{workspace_id}' workspace was not found."
+            )
+        else:
+            return workspace_id
 
     return response.get("displayName")
 
@@ -2090,17 +2193,21 @@ def _base_api(
     if (lro_return_json or lro_return_status_code) and status_codes is None:
         status_codes = [200, 202]
 
-    def get_token(audience="pbi"):
-        return notebookutils.credentials.getToken(audience)
+    class FabricDefaultCredential(TokenCredential):
+
+        def get_token(self, *scopes, **kwargs) -> AccessToken:
+            from sempy.fabric._credentials import build_access_token
+
+            return build_access_token(notebookutils.credentials.getToken("pbi"))
 
     if isinstance(status_codes, int):
         status_codes = [status_codes]
 
     if client == "fabric":
-        c = fabric.FabricRestClient(token_provider=get_token)
+        c = fabric.FabricRestClient(credential=FabricDefaultCredential())
     elif client == "fabric_sp":
-        token = auth.token_provider.get() or get_token
-        c = fabric.FabricRestClient(token_provider=token)
+        token = auth.token_provider.get() or FabricDefaultCredential()
+        c = fabric.FabricRestClient(credential=token)
     elif client in ["azure", "graph"]:
         pass
     else:
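_base_api now hands FabricRestClient an azure-core credential object rather than a bare token callback. Any object implementing get_token(*scopes, **kwargs) and returning an AccessToken satisfies that protocol. A self-contained sketch of the same pattern (not the library's class; StaticTokenCredential and its arguments are illustrative only):

    import time
    from azure.core.credentials import AccessToken, TokenCredential

    class StaticTokenCredential(TokenCredential):
        """Wraps a pre-acquired bearer token in the azure-core credential protocol."""

        def __init__(self, token: str, valid_for_seconds: int = 3600):
            self._token = token
            self._expires_on = int(time.time()) + valid_for_seconds

        def get_token(self, *scopes, **kwargs) -> AccessToken:
            # AccessToken is a (token, expires_on) named tuple from azure-core.
            return AccessToken(self._token, self._expires_on)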
sempy_labs/_job_scheduler.py CHANGED
@@ -1,7 +1,7 @@
 from sempy._utils._log import log
 import pandas as pd
 from typing import Optional, List
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_item_name_and_id,
     _update_dataframe_datatypes,
@@ -22,6 +22,8 @@ def list_item_job_instances(
 
     This is a wrapper function for the following API: `Job Scheduler - List Item Job Instances <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/list-item-job-instances>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item : str | uuid.UUID
@@ -62,6 +64,7 @@ list_item_job_instances
     responses = _base_api(
         request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances",
         uses_pagination=True,
+        client="fabric_sp",
     )
 
     if not responses[0].get("value"):
@@ -185,6 +188,9 @@ def list_item_schedules(
         "Times": "string",
         "Owner Id": "string",
         "Owner Type": "string",
+        "Recurrence": "int",
+        "Occurrence Type": "string",
+        "Occurrence Day of Month": "int",
     }
     df = _create_dataframe(columns=columns)
 
@@ -210,6 +216,11 @@ def list_item_schedules(
                 "Times": config.get("times"),
                 "Owner Id": own.get("id"),
                 "Owner Type": own.get("type"),
+                "Recurrence": config.get("recurrence"),
+                "Occurrence Type": config.get("occurence", {}).get("occurrenceType"),
+                "Occurrence Day of Month": config.get("occurrence", {}).get(
+                    "dayOfMonth"
+                ),
             }
         )
 
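With client="fabric_sp" passed through, the job listing also works under service principal authentication. A usage sketch (item and workspace names are placeholders; the parameter names follow the docstring shown above, and root-level re-export is assumed):

    import sempy_labs as labs

    # Placeholder item/workspace names; "type" follows the Fabric item types
    # used elsewhere in the library (e.g. "Notebook").
    jobs = labs.list_item_job_instances(
        item="Nightly ETL",
        type="Notebook",
        workspace="Dev",
    )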
sempy_labs/_kql_databases.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
     delete_item,
sempy_labs/_kql_querysets.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -19,6 +19,8 @@ def list_kql_querysets(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Items - List KQL Querysets <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/list-kql-querysets>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -42,7 +44,9 @@ def list_kql_querysets(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     workspace_id = resolve_workspace_id(workspace)
 
     responses = _base_api(
-        request=f"v1/workspaces/{workspace_id}/kqlQuerysets", uses_pagination=True
+        request=f"v1/workspaces/{workspace_id}/kqlQuerysets",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     rows = []
@@ -71,6 +75,8 @@ def create_kql_queryset(
 
     This is a wrapper function for the following API: `Items - Create KQL Queryset <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/create-kql-queryset>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -97,6 +103,8 @@ def delete_kql_queryset(
 
     This is a wrapper function for the following API: `Items - Delete KQL Queryset <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/delete-kql-queryset>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     kql_queryset: str | uuid.UUID
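A one-line usage sketch for the listing helper above (the workspace name is a placeholder; root-level re-export is assumed):

    import sempy_labs as labs

    querysets = labs.list_kql_querysets(workspace="Dev")  # "Dev" is a placeholder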
sempy_labs/_kusto.py CHANGED
@@ -5,8 +5,8 @@ from sempy._utils._log import log
 import sempy_labs._icons as icons
 from typing import Optional
 from uuid import UUID
-from ._kql_databases import _resolve_cluster_uri
-from ._helper_functions import resolve_item_id
+from sempy_labs._kql_databases import _resolve_cluster_uri
+from sempy_labs._helper_functions import resolve_item_id
 
 
 @log
sempy_labs/_list_functions.py CHANGED
@@ -1,5 +1,5 @@
 import sempy.fabric as fabric
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     create_relationship_name,
     format_dax_object_name,
sempy_labs/_managed_private_endpoints.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _is_valid_uuid,
     _base_api,
sempy_labs/_mirrored_databases.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _update_dataframe_datatypes,
     _base_api,
@@ -8,7 +8,7 @@ from ._helper_functions import (
     _create_dataframe,
     delete_item,
     create_item,
-    get_item_definition,
+    _get_item_definition,
     resolve_workspace_id,
 )
 import sempy_labs._icons as icons
@@ -91,6 +91,8 @@ def create_mirrored_database(
 
     This is a wrapper function for the following API: `Items - Create Mirrored Database <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/create-mirrored-database>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -117,6 +119,8 @@ def delete_mirrored_database(
 
     This is a wrapper function for the following API: `Items - Delete Mirrored Database <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/delete-mirrored-database>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database: str
@@ -139,6 +143,8 @@ def get_mirroring_status(
 
     This is a wrapper function for the following API: `Mirroring - Get Mirroring Status <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/get-mirroring-status>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database: str | uuid.UUID
@@ -154,13 +160,14 @@ def get_mirroring_status(
         The status of a mirrored database.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace_id = resolve_workspace_id(workspace)
     item_id = resolve_item_id(
         item=mirrored_database, type="MirroredDatabase", workspace=workspace
     )
     response = _base_api(
         request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/getMirroringStatus",
         status_codes=200,
+        client="fabric_sp",
     )
 
     return response.json().get("status", {})
@@ -175,6 +182,8 @@ def get_tables_mirroring_status(
 
     This is a wrapper function for the following API: `Mirroring - Get Tables Mirroring Status <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/get-tables-mirroring-status>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database: str | uuid.UUID
@@ -190,7 +199,7 @@ def get_tables_mirroring_status(
         A pandas dataframe showing the mirroring status of the tables.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    workspace_id = resolve_workspace_id(workspace)
     item_id = resolve_item_id(
         item=mirrored_database, type="MirroredDatabase", workspace=workspace
     )
@@ -199,6 +208,7 @@ def get_tables_mirroring_status(
         method="post",
         status_codes=200,
         uses_pagination=True,
+        client="fabric_sp",
     )
 
     columns = {
@@ -211,21 +221,24 @@ def get_tables_mirroring_status(
     }
     df = _create_dataframe(columns=columns)
 
+    rows = []
     for r in responses:
         for v in r.get("data", []):
             m = v.get("metrics", {})
-            new_data = {
-                "Source Schema Name": v.get("sourceSchemaName"),
-                "Source Table Name": v.get("sourceTableName"),
-                "Status": v.get("status"),
-                "Processed Bytes": m.get("processedBytes"),
-                "Processed Rows": m.get("processedRows"),
-                "Last Sync Date": m.get("lastSyncDateTime"),
-            }
-
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+            rows.append(
+                {
+                    "Source Schema Name": v.get("sourceSchemaName"),
+                    "Source Table Name": v.get("sourceTableName"),
+                    "Status": v.get("status"),
+                    "Processed Bytes": m.get("processedBytes"),
+                    "Processed Rows": m.get("processedRows"),
+                    "Last Sync Date": m.get("lastSyncDateTime"),
+                }
+            )
 
-    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
 
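The rewrite above replaces per-row pd.concat with a row buffer that is materialized once. Concatenating inside a loop copies the accumulated frame on every iteration, which is quadratic in the number of rows; collecting plain dicts first is the idiomatic fix. A self-contained sketch of the pattern, using made-up sample data:

    import pandas as pd

    # Stand-in for paginated API responses (sample data, not real output).
    api_pages = [
        {"data": [{"sourceTableName": "dim_date", "status": "Running",
                   "metrics": {"processedRows": 42}}]},
    ]

    rows = []
    for page in api_pages:
        for v in page.get("data", []):
            m = v.get("metrics", {})
            rows.append(
                {
                    "Source Table Name": v.get("sourceTableName"),
                    "Status": v.get("status"),
                    "Processed Rows": m.get("processedRows"),
                }
            )

    # One DataFrame construction instead of n concatenations.
    df = pd.DataFrame(rows)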
@@ -239,6 +252,8 @@ def start_mirroring(
 
     This is a wrapper function for the following API: `Mirroring - Start Mirroring <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/start-mirroring>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database: str | uuid.UUID
@@ -257,6 +272,7 @@ def start_mirroring(
         request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/startMirroring",
         method="post",
         status_codes=200,
+        client="fabric_sp",
     )
 
     print(
@@ -273,6 +289,8 @@ def stop_mirroring(
 
     This is a wrapper function for the following API: `Mirroring - Stop Mirroring <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/stop-mirroring>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database: str | uuid.UUID
@@ -291,6 +309,7 @@ def stop_mirroring(
         request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/stopMirroring",
         method="post",
         status_codes=200,
+        client="fabric_sp",
     )
 
     print(
@@ -309,6 +328,8 @@ def get_mirrored_database_definition(
 
     This is a wrapper function for the following API: `Items - Get Mirrored Database Definition <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/get-mirrored-database-definition>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database : str | uuid.UUID
@@ -327,7 +348,7 @@ def get_mirrored_database_definition(
         The mirrored database definition.
     """
 
-    return get_item_definition(
+    return _get_item_definition(
         item=mirrored_database,
         type="MirroredDatabase",
         workspace=workspace,
@@ -345,6 +366,8 @@ def update_mirrored_database_definition(
     """
     Updates an existing notebook with a new definition.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     mirrored_database : str | uuid.UUID
@@ -383,6 +406,7 @@ def update_mirrored_database_definition(
         json=request_body,
         status_codes=None,
         lro_return_status_code=True,
+        client="fabric_sp",
     )
 
     print(
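A combined usage sketch for the mirroring helpers above (database and workspace names are placeholders; root-level re-export is assumed, and with the new client="fabric_sp" routing the calls can also run under service principal authentication):

    import sempy_labs as labs

    labs.start_mirroring(mirrored_database="SalesDB", workspace="Prod")

    status = labs.get_mirroring_status(mirrored_database="SalesDB", workspace="Prod")
    tables = labs.get_tables_mirroring_status(mirrored_database="SalesDB", workspace="Prod")

    labs.stop_mirroring(mirrored_database="SalesDB", workspace="Prod")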
sempy_labs/_mirrored_warehouses.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
sempy_labs/_ml_experiments.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_id,
     _base_api,
     delete_item,