semantic-link-labs 0.9.0__py3-none-any.whl → 0.9.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (83)
  1. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/METADATA +68 -7
  2. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/RECORD +83 -76
  3. sempy_labs/__init__.py +14 -12
  4. sempy_labs/_authentication.py +0 -2
  5. sempy_labs/_capacities.py +120 -142
  6. sempy_labs/_capacity_migration.py +61 -94
  7. sempy_labs/_clear_cache.py +9 -8
  8. sempy_labs/_connections.py +72 -105
  9. sempy_labs/_data_pipelines.py +47 -49
  10. sempy_labs/_dataflows.py +45 -51
  11. sempy_labs/_dax.py +228 -6
  12. sempy_labs/_delta_analyzer.py +303 -0
  13. sempy_labs/_deployment_pipelines.py +72 -66
  14. sempy_labs/_environments.py +39 -36
  15. sempy_labs/_eventhouses.py +35 -35
  16. sempy_labs/_eventstreams.py +38 -39
  17. sempy_labs/_external_data_shares.py +29 -42
  18. sempy_labs/_gateways.py +57 -101
  19. sempy_labs/_generate_semantic_model.py +22 -30
  20. sempy_labs/_git.py +46 -66
  21. sempy_labs/_graphQL.py +95 -0
  22. sempy_labs/_helper_functions.py +175 -30
  23. sempy_labs/_job_scheduler.py +47 -59
  24. sempy_labs/_kql_databases.py +27 -34
  25. sempy_labs/_kql_querysets.py +23 -30
  26. sempy_labs/_list_functions.py +262 -164
  27. sempy_labs/_managed_private_endpoints.py +52 -47
  28. sempy_labs/_mirrored_databases.py +110 -134
  29. sempy_labs/_mirrored_warehouses.py +13 -13
  30. sempy_labs/_ml_experiments.py +36 -36
  31. sempy_labs/_ml_models.py +37 -38
  32. sempy_labs/_model_dependencies.py +2 -0
  33. sempy_labs/_notebooks.py +28 -29
  34. sempy_labs/_one_lake_integration.py +2 -0
  35. sempy_labs/_query_scale_out.py +63 -81
  36. sempy_labs/_refresh_semantic_model.py +12 -14
  37. sempy_labs/_spark.py +54 -79
  38. sempy_labs/_sql.py +7 -11
  39. sempy_labs/_vertipaq.py +8 -3
  40. sempy_labs/_warehouses.py +30 -33
  41. sempy_labs/_workloads.py +15 -20
  42. sempy_labs/_workspace_identity.py +13 -17
  43. sempy_labs/_workspaces.py +49 -48
  44. sempy_labs/admin/__init__.py +2 -0
  45. sempy_labs/admin/_basic_functions.py +244 -281
  46. sempy_labs/admin/_domains.py +188 -103
  47. sempy_labs/admin/_external_data_share.py +26 -31
  48. sempy_labs/admin/_git.py +17 -22
  49. sempy_labs/admin/_items.py +34 -48
  50. sempy_labs/admin/_scanner.py +20 -13
  51. sempy_labs/directlake/_directlake_schema_compare.py +2 -0
  52. sempy_labs/directlake/_dl_helper.py +10 -11
  53. sempy_labs/directlake/_generate_shared_expression.py +4 -5
  54. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
  55. sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
  56. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
  57. sempy_labs/directlake/_warm_cache.py +2 -0
  58. sempy_labs/graph/__init__.py +33 -0
  59. sempy_labs/graph/_groups.py +402 -0
  60. sempy_labs/graph/_teams.py +113 -0
  61. sempy_labs/graph/_users.py +191 -0
  62. sempy_labs/lakehouse/__init__.py +4 -0
  63. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -10
  64. sempy_labs/lakehouse/_get_lakehouse_tables.py +14 -20
  65. sempy_labs/lakehouse/_lakehouse.py +101 -4
  66. sempy_labs/lakehouse/_shortcuts.py +42 -20
  67. sempy_labs/migration/__init__.py +4 -0
  68. sempy_labs/migration/_direct_lake_to_import.py +66 -0
  69. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +1 -0
  70. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
  71. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
  72. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
  73. sempy_labs/report/_download_report.py +8 -13
  74. sempy_labs/report/_generate_report.py +49 -46
  75. sempy_labs/report/_paginated.py +20 -26
  76. sempy_labs/report/_report_functions.py +50 -45
  77. sempy_labs/report/_report_list_functions.py +2 -0
  78. sempy_labs/report/_report_rebind.py +6 -10
  79. sempy_labs/report/_reportwrapper.py +187 -220
  80. sempy_labs/tom/_model.py +8 -5
  81. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/LICENSE +0 -0
  82. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/WHEEL +0 -0
  83. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/top_level.txt +0 -0
sempy_labs/_graphQL.py ADDED
@@ -0,0 +1,95 @@
+ import pandas as pd
+ from uuid import UUID
+ from typing import Optional
+ from sempy_labs._helper_functions import (
+     _base_api,
+     _create_dataframe,
+     resolve_workspace_name_and_id,
+     _print_success,
+ )
+
+
+ def list_graphql_apis(workspace: Optional[str | UUID]) -> pd.DataFrame:
+     """
+     Shows the Graph QL APIs within a workspace.
+
+     This is a wrapper function for the following API: `Items - List GraphQLApis <https://learn.microsoft.com/rest/api/fabric/graphqlapi/items/list-graphqlapi-s>`_.
+
+     Parameters
+     ----------
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the GraphQL APIs within a workspace.
+     """
+
+     columns = {
+         "GraphQL API Name": "string",
+         "GraphQL API Id": "string",
+         "Description": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     responses = _base_api(
+         request=f"/v1/workspaces/{workspace_id}/GraphQLApis", uses_pagination=True
+     )
+
+     for r in responses:
+         for v in r.get("value", []):
+             new_data = {
+                 "GraphQL API Name": v.get("displayName"),
+                 "GraphQL API Id": v.get("id"),
+                 "Description": v.get("description"),
+             }
+             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+     return df
+
+
+ def create_graphql_api(
+     name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
+ ):
+     """
+     Creates a GraphQL API.
+
+     This is a wrapper function for the following API: `Items - Create GraphQLApi <https://learn.microsoft.com/rest/api/fabric/graphqlapi/items/create-graphqlapi>`_.
+
+     Parameters
+     ----------
+     name: str
+         Name of the GraphQL API.
+     description : str, default=None
+         A description of the GraphQL API.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     payload = {"displayName": name}
+
+     if description:
+         payload["description"] = description
+
+     _base_api(
+         request=f"/v1/workspaces/{workspace_id}/GraphQLApis",
+         method="post",
+         status_codes=[201, 202],
+         payload=payload,
+         lro_return_status_code=True,
+     )
+     _print_success(
+         item_name=name,
+         item_type="GraphQL API",
+         workspace_name=workspace_name,
+         action="created",
+     )
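A minimal usage sketch for the two new functions (the workspace and item names below are placeholders, and this assumes the functions are exported from the package root as the updated sempy_labs/__init__.py suggests; otherwise import them from sempy_labs._graphQL):

    import sempy_labs as labs

    # Create a GraphQL API item in the target workspace, then list the workspace's GraphQL APIs.
    labs.create_graphql_api(name="SalesGraphQL", description="Demo API", workspace="Sales")
    df = labs.list_graphql_apis(workspace="Sales")
    print(df)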
sempy_labs/_helper_functions.py CHANGED
@@ -15,6 +15,8 @@ from azure.core.credentials import TokenCredential, AccessToken
  import urllib.parse
  import numpy as np
  from IPython.display import display, HTML
+ import requests
+ import sempy_labs._authentication as auth


  def _build_url(url: str, params: dict) -> str:
@@ -36,9 +38,9 @@ def create_abfss_path(

      Parameters
      ----------
-     lakehouse_id : UUID
+     lakehouse_id : uuid.UUID
          ID of the Fabric lakehouse.
-     lakehouse_workspace_id : UUID
+     lakehouse_workspace_id : uuid.UUID
          ID of the Fabric workspace.
      delta_table_name : str
          Name of the delta table name.
@@ -49,7 +51,9 @@ def create_abfss_path(
          An abfss path which can be used to save/reference a delta table in a Fabric lakehouse.
      """

-     return f"abfss://{lakehouse_workspace_id}@onelake.dfs.fabric.microsoft.com/{lakehouse_id}/Tables/{delta_table_name}"
+     fp = _get_default_file_path()
+
+     return f"abfss://{lakehouse_workspace_id}@{fp}/{lakehouse_id}/Tables/{delta_table_name}"


  def _get_default_file_path() -> str:
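In practice this only swaps the hard-coded OneLake endpoint for whatever _get_default_file_path() returns, so the shape of the path is unchanged. A small sketch (the IDs are made up):

    from sempy_labs._helper_functions import create_abfss_path

    path = create_abfss_path(
        lakehouse_id="11111111-1111-1111-1111-111111111111",
        lakehouse_workspace_id="22222222-2222-2222-2222-222222222222",
        delta_table_name="sales",
    )
    # Typically: abfss://<workspace id>@onelake.dfs.fabric.microsoft.com/<lakehouse id>/Tables/sales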
@@ -166,6 +170,16 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None)
      )


+ def resolve_item_id(
+     item: str | UUID, type: str, workspace: Optional[str] = None
+ ) -> UUID:
+
+     if _is_valid_uuid(item):
+         return item
+     else:
+         return fabric.resolve_item_id(item_name=item, type=type, workspace=workspace)
+
+
  def resolve_item_name_and_id(
      item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
  ) -> Tuple[str, UUID]:
@@ -244,7 +258,7 @@ def resolve_dataset_id(

      Returns
      -------
-     UUID
+     uuid.UUID
          The ID of the semantic model.
      """

@@ -719,7 +733,7 @@ def resolve_item_type(item_id: UUID, workspace: Optional[str | UUID] = None) ->

      if dfI_filt.empty:
          raise ValueError(
-             f"Invalid 'item_id' parameter. The '{item_id}' item was not found in the '{workspace_name}' workspace."
+             f"{icons.red_dot} Invalid 'item_id' parameter. The '{item_id}' item was not found in the '{workspace_name}' workspace."
          )
      return dfI_filt["Type"].iloc[0]

@@ -780,7 +794,7 @@ def resolve_workspace_capacity(

      Returns
      -------
-     Tuple[UUID, str]
+     Tuple[uuid.UUID, str]
          capacity Id; capacity came.
      """

@@ -811,7 +825,7 @@ def get_capacity_id(workspace: Optional[str | UUID] = None) -> UUID:

      Returns
      -------
-     UUID
+     uuid.UUID
          The capacity Id.
      """

@@ -849,7 +863,7 @@ def get_capacity_name(workspace: Optional[str | UUID] = None) -> str:
      capacity_id = get_capacity_id(workspace)
      dfC = fabric.list_capacities()
      dfC_filt = dfC[dfC["Id"] == capacity_id]
-     if len(dfC_filt) == 0:
+     if dfC_filt.empty:
          raise ValueError(
              f"{icons.red_dot} The '{capacity_id}' capacity Id does not exist."
          )
@@ -880,7 +894,7 @@ def resolve_capacity_name(capacity_id: Optional[UUID] = None) -> str:
      dfC = fabric.list_capacities()
      dfC_filt = dfC[dfC["Id"] == capacity_id]

-     if len(dfC_filt) == 0:
+     if dfC_filt.empty:
          raise ValueError(
              f"{icons.red_dot} The '{capacity_id}' capacity Id does not exist."
          )
@@ -901,7 +915,7 @@ def resolve_capacity_id(capacity_name: Optional[str] = None) -> UUID:

      Returns
      -------
-     UUID
+     uuid.UUID
          The capacity Id.
      """

@@ -911,7 +925,7 @@ def resolve_capacity_id(capacity_name: Optional[str] = None) -> UUID:

      dfC = fabric.list_capacities()
      dfC_filt = dfC[dfC["Display Name"] == capacity_name]
-     if len(dfC_filt) == 0:
+     if dfC_filt.empty:
          raise ValueError(
              f"{icons.red_dot} The '{capacity_name}' capacity does not exist."
          )
@@ -994,14 +1008,14 @@ def pagination(client, response):
      return responses


- def resolve_deployment_pipeline_id(deployment_pipeline: str) -> UUID:
+ def resolve_deployment_pipeline_id(deployment_pipeline: str | UUID) -> UUID:
      """
      Obtains the Id for a given deployment pipeline.

      Parameters
      ----------
-     deployment_pipeline : str
-         The deployment pipeline name
+     deployment_pipeline : str | uuid.UUID
+         The deployment pipeline name or ID.

      Returns
      -------
@@ -1011,15 +1025,17 @@ def resolve_deployment_pipeline_id(deployment_pipeline: str) -> UUID:

      from sempy_labs._deployment_pipelines import list_deployment_pipelines

-     dfP = list_deployment_pipelines()
-     dfP_filt = dfP[dfP["Deployment Pipeline Name"] == deployment_pipeline]
-     if len(dfP_filt) == 0:
-         raise ValueError(
-             f"{icons.red_dot} The '{deployment_pipeline}' deployment pipeline is not valid."
-         )
-     deployment_pipeline_id = dfP_filt["Deployment Pipeline Id"].iloc[0]
+     if _is_valid_uuid(deployment_pipeline):
+         return deployment_pipeline
+     else:

-     return deployment_pipeline_id
+         dfP = list_deployment_pipelines()
+         dfP_filt = dfP[dfP["Deployment Pipeline Name"] == deployment_pipeline]
+         if len(dfP_filt) == 0:
+             raise ValueError(
+                 f"{icons.red_dot} The '{deployment_pipeline}' deployment pipeline is not valid."
+             )
+         return dfP_filt["Deployment Pipeline Id"].iloc[0]


  class FabricTokenCredential(TokenCredential):
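The practical effect is that callers can now pass either the pipeline name or its ID and no longer need to resolve the ID themselves. A hedged sketch (the pipeline name is hypothetical):

    from sempy_labs._helper_functions import resolve_deployment_pipeline_id

    # A name is looked up via list_deployment_pipelines(); a UUID is returned as-is.
    pipeline_id = resolve_deployment_pipeline_id("Sales Deployment Pipeline")
    same_id = resolve_deployment_pipeline_id(pipeline_id)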
@@ -1209,7 +1225,7 @@ def resolve_notebook_id(notebook: str, workspace: Optional[str | UUID] = None) -

      Returns
      -------
-     UUID
+     uuid.UUID
          The notebook Id.
      """

@@ -1224,23 +1240,34 @@ def generate_guid():


  def _get_column_aggregate(
-     lakehouse: str,
      table_name: str,
      column_name: str = "RunId",
+     lakehouse: Optional[str | UUID] = None,
+     workspace: Optional[str | UUID] = None,
      function: str = "max",
      default_value: int = 0,
  ) -> int:

      from pyspark.sql import SparkSession
+     from pyspark.sql.functions import approx_count_distinct
+     from pyspark.sql import functions as F

-     spark = SparkSession.builder.getOrCreate()
      function = function.upper()
-     query = f"SELECT {function}({column_name}) FROM {lakehouse}.{table_name}"
-     if "COUNT" in function and "DISTINCT" in function:
-         query = f"SELECT COUNT(DISTINCT({column_name})) FROM {lakehouse}.{table_name}"
-     dfSpark = spark.sql(query)
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)
+     path = create_abfss_path(lakehouse_id, workspace_id, table_name)
+
+     spark = SparkSession.builder.getOrCreate()
+     df = spark.read.format("delta").load(path)
+
+     if function in {"COUNTDISTINCT", "DISTINCTCOUNT"}:
+         result = df.select(F.count_distinct(F.col(column_name)))
+     elif "APPROX" in function:
+         result = df.select(approx_count_distinct(column_name))
+     else:
+         result = df.selectExpr(f"{function}({column_name})")

-     return dfSpark.collect()[0][0] or default_value
+     return result.collect()[0][0] or default_value


  def _make_list_unique(my_list):
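The aggregate helper now loads the Delta table directly from its abfss path instead of running Spark SQL against an attached lakehouse, which is why lakehouse and workspace became explicit optional parameters. A sketch of the new call shape; note this is a private helper whose signature may change, and the table, lakehouse, and workspace names are illustrative:

    from sempy_labs._helper_functions import _get_column_aggregate

    # Highest RunId in the 'runs' table of the given lakehouse; falls back to 0 for an empty table.
    last_run = _get_column_aggregate(
        table_name="runs",
        column_name="RunId",
        lakehouse="MyLakehouse",
        workspace="Sales",
        function="max",
    )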
@@ -1446,3 +1473,121 @@ def _get_fabric_context_setting(name: str):
  def get_tenant_id():

      _get_fabric_context_setting(name="trident.tenant.id")
+
+
+ def _base_api(
+     request: str,
+     client: str = "fabric",
+     method: str = "get",
+     payload: Optional[str] = None,
+     status_codes: Optional[int] = 200,
+     uses_pagination: bool = False,
+     lro_return_json: bool = False,
+     lro_return_status_code: bool = False,
+ ):
+
+     from sempy_labs._authentication import _get_headers
+
+     if (lro_return_json or lro_return_status_code) and status_codes is None:
+         status_codes = [200, 202]
+
+     if isinstance(status_codes, int):
+         status_codes = [status_codes]
+
+     if client == "fabric":
+         c = fabric.FabricRestClient()
+     elif client == "fabric_sp":
+         c = fabric.FabricRestClient(token_provider=auth.token_provider.get())
+     elif client in ["azure", "graph"]:
+         pass
+     else:
+         raise ValueError(f"{icons.red_dot} The '{client}' client is not supported.")
+
+     if client not in ["azure", "graph"]:
+         if method == "get":
+             response = c.get(request)
+         elif method == "delete":
+             response = c.delete(request)
+         elif method == "post":
+             response = c.post(request, json=payload)
+         elif method == "patch":
+             response = c.patch(request, json=payload)
+         elif method == "put":
+             response = c.put(request, json=payload)
+         else:
+             raise NotImplementedError
+     else:
+         headers = _get_headers(auth.token_provider.get(), audience=client)
+         response = requests.request(
+             method.upper(),
+             f"https://graph.microsoft.com/v1.0/{request}",
+             headers=headers,
+             json=payload,
+         )
+
+     if lro_return_json:
+         return lro(c, response, status_codes).json()
+     elif lro_return_status_code:
+         return lro(c, response, status_codes, return_status_code=True)
+     else:
+         if response.status_code not in status_codes:
+             raise FabricHTTPException(response)
+         if uses_pagination:
+             responses = pagination(c, response)
+             return responses
+         else:
+             return response
+
+
+ def _create_dataframe(columns: dict) -> pd.DataFrame:
+
+     return pd.DataFrame(columns=list(columns.keys()))
+
+
+ def _update_dataframe_datatypes(dataframe: pd.DataFrame, column_map: dict):
+     """
+     Updates the datatypes of columns in a pandas dataframe based on a column map.
+
+     Example:
+     {
+         "Order": "int",
+         "Public": "bool",
+     }
+     """
+
+     for column, data_type in column_map.items():
+         if column in dataframe.columns:
+             if data_type == "int":
+                 dataframe[column] = dataframe[column].astype(int)
+             elif data_type == "bool":
+                 dataframe[column] = dataframe[column].astype(bool)
+             elif data_type == "float":
+                 dataframe[column] = dataframe[column].astype(float)
+             elif data_type == "datetime":
+                 dataframe[column] = pd.to_datetime(dataframe[column])
+             # This is for a special case in admin.list_reports where datetime itself does not work. Coerce fixes the issue.
+             elif data_type == "datetime_coerce":
+                 dataframe[column] = pd.to_datetime(dataframe[column], errors="coerce")
+             # This is for list_synonyms since the weight column is float and can have NaN values.
+             elif data_type == "float_fillna":
+                 dataframe[column] = dataframe[column].fillna(0).astype(float)
+             # This is to avoid NaN values in integer columns (for delta analyzer)
+             elif data_type == "int_fillna":
+                 dataframe[column] = dataframe[column].fillna(0).astype(int)
+             elif data_type in ["str", "string"]:
+                 dataframe[column] = dataframe[column].astype(str)
+             else:
+                 raise NotImplementedError
+
+
+ def _print_success(item_name, item_type, workspace_name, action="created"):
+     if action == "created":
+         print(
+             f"{icons.green_dot} The '{item_name}' {item_type} has been successfully created in the '{workspace_name}' workspace."
+         )
+     elif action == "deleted":
+         print(
+             f"{icons.green_dot} The '{item_name}' {item_type} has been successfully deleted from the '{workspace_name}' workspace."
+         )
+     else:
+         raise NotImplementedError
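Taken together, these helpers define the pattern the refactored list_* functions in this release follow: declare column names and dtypes once, call _base_api (with pagination where the endpoint supports it), then coerce dtypes at the end. A condensed sketch of that flow, using the warehouses endpoint purely as an example and with error handling elided:

    import pandas as pd
    from sempy_labs._helper_functions import (
        _base_api,
        _create_dataframe,
        _update_dataframe_datatypes,
        resolve_workspace_name_and_id,
    )

    columns = {"Warehouse Name": "string", "Warehouse Id": "string"}
    df = _create_dataframe(columns=columns)

    (workspace_name, workspace_id) = resolve_workspace_name_and_id()
    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/warehouses", uses_pagination=True
    )
    for r in responses:
        for v in r.get("value", []):
            row = {"Warehouse Name": v.get("displayName"), "Warehouse Id": v.get("id")}
            df = pd.concat([df, pd.DataFrame(row, index=[0])], ignore_index=True)

    # Mutates df in place; no reassignment needed.
    _update_dataframe_datatypes(dataframe=df, column_map=columns)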
sempy_labs/_job_scheduler.py CHANGED
@@ -1,14 +1,13 @@
- import sempy.fabric as fabric
  from sempy._utils._log import log
  import pandas as pd
  from typing import Optional
  from sempy_labs._helper_functions import (
      resolve_workspace_name_and_id,
      resolve_item_name_and_id,
-     pagination,
-     lro,
+     _update_dataframe_datatypes,
+     _base_api,
+     _create_dataframe,
  )
- from sempy.fabric.exceptions import FabricHTTPException
  from uuid import UUID
  import sempy_labs._icons as icons

@@ -44,32 +43,26 @@ def list_item_job_instances(
          item=item, type=type, workspace=workspace
      )

-     client = fabric.FabricRestClient()
-     response = client.get(
-         f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances"
+     columns = {
+         "Job Instance Id": "string",
+         "Item Name": "string",
+         "Item Id": "string",
+         "Item Type": "string",
+         "Job Type": "string",
+         "Invoke Type": "string",
+         "Status": "string",
+         "Root Activity Id": "string",
+         "Start Time UTC": "datetime",
+         "End Time UTC": "string",
+         "Error Message": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     responses = _base_api(
+         request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances",
+         uses_pagination=True,
      )

-     if response.status_code != 200:
-         raise FabricHTTPException(response)
-
-     df = pd.DataFrame(
-         columns=[
-             "Job Instance Id",
-             "Item Name",
-             "Item Id",
-             "Item Type",
-             "Job Type",
-             "Invoke Type",
-             "Status",
-             "Root Activity Id",
-             "Start Time UTC",
-             "End Time UTC",
-             "Failure Reason",
-         ]
-     )
-
-     responses = pagination(client, response)
-
      if not responses[0].get("value"):
          return df

@@ -95,6 +88,8 @@ def list_item_job_instances(
      if dfs:
          df = pd.concat(dfs, ignore_index=True)

+     df = _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
      return df


@@ -134,31 +129,26 @@ def list_item_schedules(
          item=item, type=type, workspace=workspace
      )

-     df = pd.DataFrame(
-         columns=[
-             "Job Schedule Id",
-             "Enabled",
-             "Created Date Time",
-             "Start Date Time",
-             "End Date Time",
-             "Local Time Zone Id",
-             "Type",
-             "Interval",
-             "Weekdays",
-             "Times",
-             "Owner Id",
-             "Owner Type",
-         ]
+     columns = {
+         "Job Schedule Id": "string",
+         "Enabled": "bool",
+         "Created Date Time": "datetime",
+         "Start Date Time": "datetime",
+         "End Date Time": "string",
+         "Local Time Zone Id": "string",
+         "Type": "string",
+         "Interval": "string",
+         "Weekdays": "string",
+         "Times": "string",
+         "Owner Id": "string",
+         "Owner Type": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     response = _base_api(
+         request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules"
      )

-     client = fabric.FabricRestClient()
-     response = client.get(
-         f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules"
-     )
-
-     if response.status_code != 200:
-         raise FabricHTTPException(response)
-
      for v in response.json().get("value", []):
          config = v.get("configuration", {})
          own = v.get("owner", {})
@@ -179,9 +169,7 @@ def list_item_schedules(

          df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

-     df["Enabled"] = df["Enabled"].astype(bool)
-     df["Created Date Time"] = pd.to_datetime(df["Created Date Time"])
-     df["Start Date Time"] = pd.to_datetime(df["Start Date Time"])
+     _update_dataframe_datatypes(dataframe=df, column_map=columns)

      return df

@@ -217,11 +205,11 @@ def run_on_demand_item_job(
          item=item, type=type, workspace=workspace
      )

-     client = fabric.FabricRestClient()
-     response = client.post(
-         f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances?jobType={job_type}"
+     _base_api(
+         request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances?jobType={job_type}",
+         method="post",
+         lro_return_status_code=True,
+         status_codes=202,
      )

-     lro(client, response, return_status_code=True)
-
      print(f"{icons.green_dot} The '{item_name}' {type.lower()} has been executed.")
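A short usage sketch for these job-scheduler wrappers, assuming they are exported at the package root and keep the item/type/workspace/job_type parameters used above; the item and workspace names are placeholders, and "RunNotebook" is the job type Fabric uses for notebook runs:

    import sempy_labs as labs

    # Trigger an on-demand run, then review the item's job history.
    labs.run_on_demand_item_job(
        item="Daily Load", type="Notebook", workspace="Sales", job_type="RunNotebook"
    )
    jobs = labs.list_item_job_instances(item="Daily Load", type="Notebook", workspace="Sales")
    print(jobs)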
sempy_labs/_kql_databases.py CHANGED
@@ -4,10 +4,9 @@ import sempy_labs._icons as icons
  from typing import Optional
  from sempy_labs._helper_functions import (
      resolve_workspace_name_and_id,
-     lro,
-     pagination,
+     _base_api,
+     _create_dataframe,
  )
- from sempy.fabric.exceptions import FabricHTTPException
  from uuid import UUID


@@ -30,26 +29,22 @@ def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
          A pandas dataframe showing the KQL databases within a workspace.
      """

-     df = pd.DataFrame(
-         columns=[
-             "KQL Database Name",
-             "KQL Database Id",
-             "Description",
-             "Parent Eventhouse Item Id",
-             "Query Service URI",
-             "Ingestion Service URI",
-             "Database Type",
-         ]
-     )
+     columns = {
+         "KQL Database Name": "string",
+         "KQL Database Id": "string",
+         "Description": "string",
+         "Parent Eventhouse Item Id": "string",
+         "Query Service URI": "string",
+         "Ingestion Service URI": "string",
+         "Database Type": "string",
+     }
+     df = _create_dataframe(columns=columns)

      (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-     client = fabric.FabricRestClient()
-     response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases")
-     if response.status_code != 200:
-         raise FabricHTTPException(response)
-
-     responses = pagination(client, response)
+     responses = _base_api(
+         request=f"v1/workspaces/{workspace_id}/kqlDatabases", uses_pagination=True
+     )

      for r in responses:
          for v in r.get("value", []):
@@ -91,18 +86,19 @@ def create_kql_database(

      (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

-     request_body = {"displayName": name}
+     payload = {"displayName": name}

      if description:
-         request_body["description"] = description
-
-     client = fabric.FabricRestClient()
-     response = client.post(
-         f"/v1/workspaces/{workspace_id}/kqlDatabases", json=request_body
+         payload["description"] = description
+
+     _base_api(
+         request=f"v1/workspaces/{workspace_id}/kqlDatabases",
+         method="post",
+         payload=payload,
+         status_codes=[201, 202],
+         lro_return_status_code=True,
      )

-     lro(client, response, status_codes=[201, 202])
-
      print(
          f"{icons.green_dot} The '{name}' KQL database has been created within the '{workspace_name}' workspace."
      )
@@ -129,13 +125,10 @@ def delete_kql_database(name: str, workspace: Optional[str | UUID] = None):
          item_name=name, type="KQLDatabase", workspace=workspace_id
      )

-     client = fabric.FabricRestClient()
-     response = client.delete(
-         f"/v1/workspaces/{workspace_id}/kqlDatabases/{kql_database_id}"
+     _base_api(
+         request=f"/v1/workspaces/{workspace_id}/kqlDatabases/{kql_database_id}",
+         method="delete",
      )
-
-     if response.status_code != 200:
-         raise FabricHTTPException(response)
      print(
          f"{icons.green_dot} The '{name}' KQL database within the '{workspace_name}' workspace has been deleted."
      )
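And a minimal end-to-end sketch for the KQL database wrappers shown above (workspace and database names are placeholders):

    import sempy_labs as labs

    labs.create_kql_database(name="TelemetryDB", description="Demo KQL database", workspace="Sales")
    print(labs.list_kql_databases(workspace="Sales"))
    labs.delete_kql_database(name="TelemetryDB", workspace="Sales")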