msfabricpysdkcore 0.0.9__py3-none-any.whl → 0.0.11__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- msfabricpysdkcore/admin_item.py +7 -0
- msfabricpysdkcore/admin_workspace.py +20 -1
- msfabricpysdkcore/adminapi.py +133 -7
- msfabricpysdkcore/auth.py +9 -6
- msfabricpysdkcore/client.py +5 -4
- msfabricpysdkcore/coreapi.py +341 -17
- msfabricpysdkcore/deployment_pipeline.py +240 -0
- msfabricpysdkcore/environment.py +209 -0
- msfabricpysdkcore/item.py +12 -11
- msfabricpysdkcore/lakehouse.py +42 -1
- msfabricpysdkcore/long_running_operation.py +2 -6
- msfabricpysdkcore/otheritems.py +122 -3
- msfabricpysdkcore/spark_custom_pool.py +118 -0
- msfabricpysdkcore/tests/test_admin_apis.py +20 -9
- msfabricpysdkcore/tests/test_datapipelines.py +48 -0
- msfabricpysdkcore/tests/test_deployment_pipeline.py +64 -0
- msfabricpysdkcore/tests/test_domains.py +3 -2
- msfabricpysdkcore/tests/test_environments.py +65 -0
- msfabricpysdkcore/tests/test_evenstreams.py +44 -0
- msfabricpysdkcore/tests/test_git.py +3 -1
- msfabricpysdkcore/tests/test_items_incl_lakehouse.py +81 -109
- msfabricpysdkcore/tests/test_jobs.py +4 -0
- msfabricpysdkcore/tests/test_kqldatabases.py +48 -0
- msfabricpysdkcore/tests/test_shortcuts.py +3 -1
- msfabricpysdkcore/tests/test_spark.py +91 -0
- msfabricpysdkcore/tests/test_workspaces_capacities.py +6 -5
- msfabricpysdkcore/workspace.py +358 -32
- {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/METADATA +82 -32
- msfabricpysdkcore-0.0.11.dist-info/RECORD +38 -0
- msfabricpysdkcore-0.0.9.dist-info/RECORD +0 -29
- {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/LICENSE +0 -0
- {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/WHEEL +0 -0
- {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/top_level.txt +0 -0
msfabricpysdkcore/workspace.py
CHANGED
```diff
@@ -3,9 +3,11 @@ import requests
 from time import sleep
 from msfabricpysdkcore.item import Item
 from msfabricpysdkcore.lakehouse import Lakehouse
+from msfabricpysdkcore.environment import Environment
 from msfabricpysdkcore.long_running_operation import check_long_running_operation
 from msfabricpysdkcore.otheritems import DataPipeline, Eventstream, KQLDatabase, KQLQueryset, SparkJobDefinition
-from msfabricpysdkcore.otheritems import MLExperiment, MLModel, Notebook, Report, SemanticModel, Warehouse
+from msfabricpysdkcore.otheritems import Eventhouse, MLExperiment, MLModel, Notebook, Report, SemanticModel, Warehouse
+from msfabricpysdkcore.spark_custom_pool import SparkCustomPool
 
 
 class Workspace:
```
```diff
@@ -138,9 +140,7 @@ class Workspace:
                 sleep(10)
                 continue
             if response.status_code not in (200, 429):
-
-                print(response.text)
-                raise Exception(f"Error updating workspace: {response.text}")
+                raise Exception(f"Error updating workspace: {response.status_code}, {response.text}")
             break
 
         assert response.status_code == 200
```
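
The removed debug print is folded into a single, more informative exception. The surrounding retry loop is a pattern this file repeats for every REST call: up to ten attempts, sleeping 10 seconds whenever Fabric answers 429. A minimal standalone sketch of that pattern (the helper name is illustrative, not part of the SDK):

```python
import requests
from time import sleep

def request_with_retry(method, url, headers, max_attempts=10, **kwargs):
    """Issue a request, backing off 10 s and retrying while Fabric returns 429."""
    response = None
    for _ in range(max_attempts):
        response = requests.request(method, url, headers=headers, **kwargs)
        if response.status_code == 429:   # throttled: wait and retry
            sleep(10)
            continue
        if response.status_code not in (200, 201):
            raise Exception(f"Error calling {url}: {response.status_code}, {response.text}")
        break
    return response
```
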
```diff
@@ -227,6 +227,8 @@
             return self.get_data_pipeline(item_dict["id"])
         if item_dict["type"] == "Eventstream":
             return self.get_eventstream(item_dict["id"])
+        if item_dict["type"] == "Eventhouse":
+            return self.get_eventhouse(item_dict["id"])
         if item_dict["type"] == "KQLDatabase":
             return self.get_kql_database(item_dict["id"])
         if item_dict["type"] == "KQLQueryset":
```
```diff
@@ -247,11 +249,13 @@
             return self.get_spark_job_definition(item_dict["id"])
         if item_dict["type"] == "Warehouse":
             return self.get_warehouse(item_dict["id"])
+        if item_dict["type"] == "Environment":
+            return self.get_environment(item_dict["id"])
 
         item_obj = Item.from_dict(item_dict, auth=self.auth)
         return item_obj
 
-    def create_item(self, display_name, type, definition = None, description = None):
+    def create_item(self, display_name, type, definition = None, description = None, **kwargs):
         """Create an item in a workspace"""
 
         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/items"
```
```diff
@@ -265,8 +269,25 @@
         if description:
             body['description'] = description
 
-        if type in ["
-            "
+        if type in ["dataPipelines",
+                    "environments",
+                    "eventhouses",
+                    "eventstreams",
+                    "kqlDatabases",
+                    "lakehouses",
+                    "mlExperiments",
+                    "mlModels",
+                    "notebooks",
+                    "reports",
+                    "semanticModels",
+                    "sparkJobDefinitions",
+                    "warehouses"]:
+
+            if type == "kqlDatabases":
+                if "creation_payload" not in kwargs:
+                    raise Exception("creation_payload is required for KQLDatabase")
+                body["creationPayload"] = kwargs["creation_payload"]
+
             url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/{type}"
             body.pop('type')
```
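
`create_item` now accepts `**kwargs` so item types with extra creation properties can pass them through; in this release only `kqlDatabases` uses it, via `creation_payload`. A hedged usage sketch: the `FabricClientCore` entry point and `get_workspace_by_name` helper are assumed from the package README, and the payload keys follow the public Fabric REST API rather than this diff:

```python
from msfabricpysdkcore import FabricClientCore   # assumed entry point per the README

fc = FabricClientCore()
ws = fc.get_workspace_by_name("my_workspace")    # assumed helper name

# Plain item types need no extra kwargs:
lakehouse = ws.create_item(display_name="lh_demo", type="lakehouses")

# kqlDatabases must supply creation_payload, which create_item forwards as the
# REST body's "creationPayload" field (keys assumed from the public Fabric API):
kqldb = ws.create_item(
    display_name="kqldb_demo",
    type="kqlDatabases",
    creation_payload={
        "databaseType": "ReadWrite",
        "parentEventhouseItemId": "<eventhouse-item-id>",
    },
)
```
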
```diff
@@ -291,7 +312,11 @@
         item = None
         i = 0
 
-        type_mapping = {"
+        type_mapping = {"dataPipelines": "DataPipeline",
+                        "environments": "Environment",
+                        "eventhouses": "Eventhouse",
+                        "eventstreams": "Eventstream",
+                        "kqlDatabases": "KQLDatabase",
                         "lakehouses": "Lakehouse",
                         "mlExperiments": "MLExperiment",
                         "mlModels": "MLModel",
```
```diff
@@ -299,7 +324,8 @@
                         "reports": "Report",
                         "semanticModels": "SemanticModel",
                         "sparkJobDefinitions": "SparkJobDefinition",
-                        "warehouses": "Warehouse"
+                        "warehouses": "Warehouse"
+                        }
 
         if type in type_mapping.keys():
             type = type_mapping[type]
```
```diff
@@ -433,9 +459,9 @@
 
         return items
 
-    def get_item_definition(self, item_id):
+    def get_item_definition(self, item_id, type = None, format = None):
         """Get the definition of an item from a workspace"""
-        return self.get_item(item_id).get_definition()
+        return self.get_item(item_id).get_definition(type=type, format=format)
 
     def update_item(self, item_id, display_name = None, description = None):
         """Update an item in a workspace"""
```
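
`get_item_definition` now forwards optional `type` and `format` parameters to the underlying `Item.get_definition`, matching the REST getDefinition query options. A hedged sketch, reusing `ws` from above; the `ipynb` format value comes from the public Fabric notebook API and is an assumption here:

```python
# Fetch a notebook's definition in ipynb format via the generic item route.
definition = ws.get_item_definition("<notebook-item-id>",
                                    type="notebooks",
                                    format="ipynb")
```
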
```diff
@@ -626,15 +652,24 @@
 
         return response.status_code
 
-    def list_tables(self,
-
+    def list_tables(self, lakehouse_id):
+        """List tables in a workspace"""
+        return self.get_lakehouse(lakehouse_id=lakehouse_id).list_tables()
 
-    def load_table(self,
+    def load_table(self, lakehouse_id, table_name, path_type, relative_path,
                    file_extension = None, format_options = None,
                    mode = None, recursive = None, wait_for_completion = True):
-
+
+        return self.get_lakehouse(lakehouse_id=lakehouse_id).load_table(table_name, path_type, relative_path,
                                                                         file_extension, format_options,
                                                                         mode, recursive, wait_for_completion)
+
+    def run_on_demand_table_maintenance(self, lakehouse_id, execution_data,
+                                        job_type = "TableMaintenance", wait_for_completion = True):
+        """Run on demand table maintenance"""
+        return self.get_lakehouse(lakehouse_id=lakehouse_id).run_on_demand_table_maintenance(execution_data,
+                                                                                             job_type,
+                                                                                             wait_for_completion)
 
     def list_dashboards(self):
         """List dashboards in a workspace"""
```
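
`run_on_demand_table_maintenance` forwards an `execution_data` dict to the lakehouse's table-maintenance job and can optionally block until the long-running operation finishes. The payload shape below follows the public Fabric table-maintenance API and is an assumption, not spelled out in this diff:

```python
# Hypothetical maintenance job: compact and vacuum one lakehouse table.
execution_data = {
    "tableName": "orders",
    "optimizeSettings": {"vOrder": True},             # compaction with V-Order
    "vacuumSettings": {"retentionPeriod": "7:01:00"}  # d:hh:mm retention window
}
ws.run_on_demand_table_maintenance(lakehouse_id="<lakehouse-id>",
                                   execution_data=execution_data,
                                   wait_for_completion=True)
```
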
```diff
@@ -656,9 +691,18 @@
         """List mirrored warehouses in a workspace"""
         return self.list_items(type="mirroredWarehouses")
 
+    # datapipelines
+
+    def create_data_pipeline(self, display_name, definition = None, description = None):
+        """Create a data pipeline in a workspace"""
+        return self.create_item(display_name = display_name,
+                                type = "dataPipelines",
+                                definition = definition,
+                                description = description)
+
     def list_data_pipelines(self, with_properties = False):
         """List data pipelines in a workspace"""
-        return self.list_items(
+        return self.list_items(type="dataPipelines", with_properties=with_properties)
 
     def get_data_pipeline(self, data_pipeline_id = None, data_pipeline_name = None):
         """Get a data pipeline from a workspace"""
```
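
`create_data_pipeline` is a thin wrapper over `create_item`, and `list_data_pipelines` now passes `with_properties` through. Usage, reusing `ws` from above:

```python
pipeline = ws.create_data_pipeline(display_name="pl_demo",
                                   description="created via the 0.0.11 helper")

# with_properties=True resolves each listing entry to a full item object
# rather than a bare metadata dict.
pipelines = ws.list_data_pipelines(with_properties=True)
```
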
```diff
@@ -682,9 +726,122 @@
         """Update a data pipeline in a workspace"""
         return self.get_item(item_id=data_pipeline_id).update(display_name=display_name, description=description, type="dataPipelines")
 
-    def list_eventstreams(self):
+    # environments
+
+    def list_environments(self, with_properties = False):
+        """List environments in a workspace"""
+        return self.list_items(type="environments", with_properties = with_properties)
+
+    def create_environment(self, display_name, description = None):
+        """Create an environment in a workspace"""
+        return self.create_item(display_name = display_name,
+                                type = "environments",
+                                definition = None,
+                                description = description)
+
+    def get_environment(self, environment_id = None, environment_name = None):
+        """Get an environment from a workspace"""
+        if environment_id is None and environment_name is not None:
+            return self.get_item_by_name(environment_name, "Environment")
+        elif environment_id is None:
+            raise Exception("environment_id or the environment_name is required")
+
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/environments/{environment_id}"
+
+        item_dict = self.get_item_internal(url)
+        env = Environment.from_dict(item_dict, auth=self.auth)
+        return env
+
+    def delete_environment(self, environment_id):
+        """Delete an environment from a workspace"""
+        return self.get_item(item_id=environment_id).delete(type="environments")
+
+    def update_environment(self, environment_id, display_name = None, description = None):
+        """Update an environment in a workspace"""
+        return self.get_item(item_id=environment_id).update(display_name=display_name,
+                                                            description=description,
+                                                            type="environments")
+
+    # environment spark compute
+
+    def get_published_settings(self, environment_id):
+        return self.get_environment(environment_id).get_published_settings()
+
+    def get_staging_settings(self, environment_id):
+        return self.get_environment(environment_id).get_staging_settings()
+
+    def update_staging_settings(self, environment_id,
+                                driver_cores = None, driver_memory = None, dynamic_executor_allocation = None,
+                                executor_cores = None, executor_memory = None, instance_pool = None,
+                                runtime_version = None, spark_properties = None):
+        return self.get_environment(environment_id).update_staging_settings(driver_cores=driver_cores,
+                                                                            driver_memory=driver_memory,
+                                                                            dynamic_executor_allocation=dynamic_executor_allocation,
+                                                                            executor_cores=executor_cores,
+                                                                            executor_memory=executor_memory,
+                                                                            instance_pool=instance_pool,
+                                                                            runtime_version=runtime_version,
+                                                                            spark_properties=spark_properties)
+
+    # environment spark libraries
+
+    def get_published_libraries(self, environment_id):
+        return self.get_environment(environment_id).get_published_libraries()
+
+    def get_staging_libraries(self, environment_id):
+        return self.get_environment(environment_id).get_staging_libraries()
+
+    def upload_staging_library(self, environment_id, file_path):
+        return self.get_environment(environment_id).upload_staging_library(file_path)
+
+    def publish_environment(self, environment_id):
+        return self.get_environment(environment_id).publish_environment()
+
+    def delete_staging_library(self, environment_id, library_to_delete):
+        return self.get_environment(environment_id).delete_staging_library(library_to_delete)
+
+    def cancel_publish(self, environment_id):
+        return self.get_environment(environment_id).cancel_publish()
+
+    # eventhouses
+    def list_eventhouses(self, with_properties = False):
+        """List eventhouses in a workspace"""
+        return self.list_items(type="eventhouses", with_properties=with_properties)
+
+    def create_eventhouse(self, display_name, description = None):
+        """Create an eventhouse in a workspace"""
+        return self.create_item(display_name = display_name,
+                                type = "eventhouses",
+                                definition = None,
+                                description = description)
+
+    def get_eventhouse(self, eventhouse_id = None, eventhouse_name = None):
+        """Get an eventhouse from a workspace"""
+        if eventhouse_id is None and eventhouse_name is not None:
+            return self.get_item_by_name(eventhouse_name, "Eventhouse")
+        elif eventhouse_id is None:
+            raise Exception("eventhouse_id or the eventhouse_name is required")
+
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/eventhouses/{eventhouse_id}"
+
+        item_dict = self.get_item_internal(url)
+        return Eventhouse.from_dict(item_dict, auth=self.auth)
+
+    def delete_eventhouse(self, eventhouse_id):
+        """Delete an eventhouse from a workspace"""
+        return self.get_item(item_id=eventhouse_id).delete(type="eventhouses")
+
+    def update_eventhouse(self, eventhouse_id, display_name = None, description = None):
+        """Update an eventhouse in a workspace"""
+        return self.get_item(item_id=eventhouse_id).update(display_name=display_name,
+                                                           description=description,
+                                                           type="eventhouses")
+
+    # eventstreams
+
+    def list_eventstreams(self, with_properties = False):
         """List eventstreams in a workspace"""
-        return self.list_items(type="eventstreams")
+        return self.list_items(type="eventstreams", with_properties=with_properties)
 
     def create_eventstream(self, display_name, description = None):
         """Create an eventstream in a workspace"""
```
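
The new environment surface splits configuration into staging and published state: settings and libraries are edited in staging, then promoted with `publish_environment`. A hedged end-to-end sketch, reusing `ws` from above; the setting values and wheel filename are illustrative, and `env.id` assumes the returned `Environment` exposes the usual item `id` attribute:

```python
env = ws.create_environment(display_name="env_demo")

# Stage Spark compute settings (values illustrative):
ws.update_staging_settings(env.id,
                           driver_cores=4, driver_memory="28g",
                           executor_cores=4, executor_memory="28g",
                           runtime_version="1.2")

# Stage a custom library, then promote everything staged:
ws.upload_staging_library(env.id, file_path="mylib-0.1.0-py3-none-any.whl")
ws.publish_environment(env.id)   # kicks off the publish of all staged changes

# Until the publish lands, staged and published views can differ:
print(ws.get_staging_settings(env.id))
print(ws.get_published_settings(env.id))
# A pending publish can also be abandoned: ws.cancel_publish(env.id)
```
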
```diff
@@ -717,9 +874,16 @@
 
     # kqlDatabases
 
-    def list_kql_databases(self):
+    def list_kql_databases(self, with_properties = False):
         """List kql databases in a workspace"""
-        return self.list_items(type="kqlDatabases")
+        return self.list_items(type="kqlDatabases", with_properties = with_properties)
+
+    def create_kql_database(self, creation_payload, display_name, description = None, ):
+        """Create a kql database in a workspace"""
+        return self.create_item(display_name = display_name,
+                                type = "kqlDatabases",
+                                description = description,
+                                creation_payload = creation_payload)
 
     def get_kql_database(self, kql_database_id = None, kql_database_name = None):
         """Get a kql database from a workspace"""
```
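
`create_kql_database` surfaces the `creation_payload` plumbing added to `create_item` above. A KQL database is created inside an eventhouse; the payload keys follow the public Fabric REST API and are an assumption here:

```python
eventhouse = ws.create_eventhouse(display_name="eh_demo")

kqldb = ws.create_kql_database(
    creation_payload={
        "databaseType": "ReadWrite",              # assumed API enum value
        "parentEventhouseItemId": eventhouse.id,  # assumes the item exposes .id
    },
    display_name="kqldb_demo",
)
```
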
```diff
@@ -746,9 +910,9 @@
 
     # kqlQuerysets
 
-    def list_kql_querysets(self):
+    def list_kql_querysets(self, with_properties = False):
         """List kql querysets in a workspace"""
-        return self.list_items(type="kqlQuerysets")
+        return self.list_items(type="kqlQuerysets", with_properties = with_properties)
 
     def get_kql_queryset(self, kql_queryset_id = None, kql_queryset_name = None):
         """Get a kql queryset from a workspace"""
```
```diff
@@ -812,9 +976,9 @@
 
     # mlExperiments
 
-    def list_ml_experiments(self):
+    def list_ml_experiments(self, with_properties = False):
         """List ml experiments in a workspace"""
-        return self.list_items(type="mlExperiments")
+        return self.list_items(type="mlExperiments", with_properties = with_properties)
 
     def create_ml_experiment(self, display_name, description = None):
         """Create an ml experiment in a workspace"""
```
```diff
@@ -847,9 +1011,9 @@
 
     # mlModels
 
-    def list_ml_models(self):
+    def list_ml_models(self, with_properties = False):
         """List ml models in a workspace"""
-        return self.list_items(type="mlModels")
+        return self.list_items(type="mlModels", with_properties = with_properties)
 
     def create_ml_model(self, display_name, description = None):
         """Create an ml model in a workspace"""
```
```diff
@@ -917,6 +1081,10 @@
                                                            description=description,
                                                            type="notebooks")
 
+    def get_notebook_definition(self, notebook_id, format = None):
+        """Get the definition of a notebook from a workspace"""
+        return self.get_notebook(notebook_id=notebook_id).get_definition(format=format)
+
     def update_notebook_definition(self, notebook_id, definition):
         """Update the definition of a notebook in a workspace"""
         return self.get_notebook(notebook_id=notebook_id).update_definition(definition=definition)
```
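
`get_notebook_definition` pairs with the existing `update_notebook_definition` for round-tripping notebook content. A hedged sketch; the `ipynb` format value and the shape of the returned payload (a dict carrying a `definition` key, per the public getDefinition API) are assumptions:

```python
resp = ws.get_notebook_definition("<notebook-id>", format="ipynb")

# If the SDK returns the raw REST response, the payload sits under "definition";
# adjust the unwrapping if the SDK already strips that envelope.
definition = resp.get("definition", resp)
ws.update_notebook_definition("<notebook-id>", definition=definition)
```
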
```diff
@@ -952,6 +1120,10 @@
         """Delete a report from a workspace"""
         return self.get_item(item_id=report_id).delete(type="reports")
 
+    def get_report_definition(self, report_id, format = None):
+        """Get the definition of a report from a workspace"""
+        return self.get_report(report_id=report_id).get_definition(format=format)
+
     def update_report_definition(self, report_id, definition):
         """Update the definition of a report in a workspace"""
         return self.get_report(report_id=report_id).update_definition(definition=definition)
```
```diff
@@ -987,12 +1159,16 @@
         """Delete a semantic model from a workspace"""
         return self.get_item(item_id=semantic_model_id).delete(type="semanticModels")
 
-    def update_semantic_model(self, semantic_model_id, display_name = None, description = None):
-        """Update a semantic model in a workspace"""
-        return self.get_item(item_id=semantic_model_id).update(display_name=display_name,
-                                                               description=description,
-                                                               type="semanticModels")
+    # def update_semantic_model(self, semantic_model_id, display_name = None, description = None):
+    #     """Update a semantic model in a workspace"""
+    #     return self.get_item(item_id=semantic_model_id).update(display_name=display_name,
+    #                                                            description=description,
+    #                                                            type="semanticModels")
 
+    def get_semantic_model_definition(self, semantic_model_id, format = None):
+        """Get the definition of a semantic model from a workspace"""
+        return self.get_semantic_model(semantic_model_id=semantic_model_id).get_definition(format=format)
+
     def update_semantic_model_definition(self, semantic_model_id, definition):
         """Update the definition of a semantic model in a workspace"""
         return self.get_semantic_model(semantic_model_id=semantic_model_id).update_definition(definition=definition)
```
```diff
@@ -1034,6 +1210,10 @@
                                                            description=description,
                                                            type="sparkJobDefinitions")
 
+    def get_spark_job_definition_definition(self, spark_job_definition_id, format = None):
+        """Get the definition of a spark job definition from a workspace"""
+        return self.get_spark_job_definition(spark_job_definition_id=spark_job_definition_id).get_definition(format=format)
+
     def update_spark_job_definition_definition(self, spark_job_definition_id, definition):
         """Update the definition of a spark job definition in a workspace"""
         return self.get_spark_job_definition(spark_job_definition_id=spark_job_definition_id).update_definition(definition=definition)
```
```diff
@@ -1042,7 +1222,7 @@
 
     def list_warehouses(self, with_properties = False):
         """List warehouses in a workspace"""
-        return self.list_items(type="warehouses")
+        return self.list_items(type="warehouses", with_properties = with_properties)
 
     def create_warehouse(self, display_name, description = None):
         """Create a warehouse in a workspace"""
```
```diff
@@ -1073,3 +1253,149 @@
 
 
 
+    # spark workspace custom pools
+
+    def list_workspace_custom_pools(self, continuationToken = None):
+        """List spark workspace custom pools in a workspace"""
+        # GET http://api.fabric.microsoft.com/v1/workspaces/f089354e-8366-4e18-aea3-4cb4a3a50b48/spark/pools
+
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/spark/pools"
+
+        if continuationToken:
+            url = f"{url}?continuationToken={continuationToken}"
+
+        for _ in range(10):
+            response = requests.get(url=url, headers=self.auth.get_headers())
+            if response.status_code == 429:
+                print("Too many requests, waiting 10 seconds")
+                sleep(10)
+                continue
+            if response.status_code not in (200, 429):
+                raise Exception(f"Error listing custom spark pools: {response.status_code}, {response.text}")
+            break
+
+        resp_dict = json.loads(response.text)
+        items = resp_dict["value"]
+        for item in items:
+            item["workspaceId"] = self.id
+        sppools = [SparkCustomPool.from_dict(item, auth=self.auth) for item in items]
+
+        if "continuationToken" in resp_dict:
+            item_list_next = self.list_workspace_custom_pools(continuationToken=resp_dict["continuationToken"])
+            sppools.extend(item_list_next)
+
+        return sppools
+
+    def create_workspace_custom_pool(self, name, node_family, node_size, auto_scale, dynamic_executor_allocation):
+        """Create a custom pool in a workspace"""
+
+        # POST http://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/spark/pools
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/spark/pools"
+
+        body = {
+            "name": name,
+            "nodeFamily": node_family,
+            "nodeSize": node_size,
+            "autoScale": auto_scale,
+            "dynamicExecutorAllocation": dynamic_executor_allocation
+        }
+
+        for _ in range(10):
+            response = requests.post(url=url, headers=self.auth.get_headers(), json=body)
+            if response.status_code == 429:
+                print("Too many requests, waiting 10 seconds")
+                sleep(10)
+                continue
+            if response.status_code not in (200, 201, 429):
+                raise Exception(f"Error creating custom spark pool: {response.status_code}, {response.text}")
+            break
+
+        response_dict = json.loads(response.text)
+        response_dict["workspaceId"] = self.id
+        return SparkCustomPool.from_dict(response_dict, auth=self.auth)
+
+    def get_workspace_custom_pool(self, pool_id):
+        """Get a custom pool in a workspace"""
+        # GET http://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/spark/pools/{poolId}
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/spark/pools/{pool_id}"
+
+        for _ in range(10):
+            response = requests.get(url=url, headers=self.auth.get_headers())
+            if response.status_code == 429:
+                print("Too many requests, waiting 10 seconds")
+                sleep(10)
+                continue
+            if response.status_code not in (200, 429):
+                raise Exception(f"Error getting custom spark pool: {response.status_code}, {response.text}")
+            break
+
+        response_dict = json.loads(response.text)
+        response_dict["workspaceId"] = self.id
+        return SparkCustomPool.from_dict(response_dict, auth=self.auth)
+
+    def delete_workspace_custom_pool(self, pool_id):
+        """Delete a custom pool in a workspace"""
+        pool = self.get_workspace_custom_pool(pool_id)
+        return pool.delete()
+
+    def update_workspace_custom_pool(self, pool_id, name = None, node_family = None, node_size = None,
+                                     auto_scale = None,
+                                     dynamic_executor_allocation = None):
+        """Update a custom pool in a workspace"""
+        pool = self.get_workspace_custom_pool(pool_id)
+        return pool.update(name = name,
+                           node_family = node_family,
+                           node_size = node_size,
+                           auto_scale = auto_scale,
+                           dynamic_executor_allocation = dynamic_executor_allocation)
+
+    # spark workspace settings
+
+    def get_spark_settings(self):
+
+        # GET http://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/spark/settings
+
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/spark/settings"
+
+        for _ in range(10):
+            response = requests.get(url=url, headers=self.auth.get_headers())
+            if response.status_code == 429:
+                print("Too many requests, waiting 10 seconds")
+                sleep(10)
+                continue
+            if response.status_code not in (200, 429):
+                raise Exception(f"Error getting spark settings: {response.status_code}, {response.text}")
+            break
+
+        return json.loads(response.text)
+
+
+    def update_spark_settings(self, automatic_log = None, environment = None, high_concurrency = None, pool = None):
+
+        # PATCH http://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/spark/settings
+
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/spark/settings"
+
+        body = {}
+
+        if automatic_log:
+            body["automaticLog"] = automatic_log
+        if environment:
+            body["environment"] = environment
+        if high_concurrency:
+            body["highConcurrency"] = high_concurrency
+        if pool:
+            body["pool"] = pool
+
+        for _ in range(10):
+            response = requests.patch(url=url, headers=self.auth.get_headers(), json=body)
+            if response.status_code == 429:
+                print("Too many requests, waiting 10 seconds")
+                sleep(10)
+                continue
+            if response.status_code not in (200, 429):
+                raise Exception(f"Error updating spark settings: {response.status_code}, {response.text}")
+            break
+
+        return json.loads(response.text)
+
```
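
The custom-pool and workspace Spark settings methods talk to the REST endpoints directly rather than going through `Item`. A hedged usage sketch, reusing `ws` from above; the `autoScale`, `dynamicExecutorAllocation`, and `highConcurrency` shapes follow the public Fabric Spark API and are assumptions, not spelled out in this diff:

```python
pool = ws.create_workspace_custom_pool(
    name="pool_demo",
    node_family="MemoryOptimized",
    node_size="Small",
    auto_scale={"enabled": True, "minNodeCount": 1, "maxNodeCount": 3},
    dynamic_executor_allocation={"enabled": True, "minExecutors": 1,
                                 "maxExecutors": 2},
)

settings = ws.get_spark_settings()
ws.update_spark_settings(
    high_concurrency={"notebookInteractiveRunEnabled": True})  # assumed key

pool.delete()  # SparkCustomPool.delete(), as used by delete_workspace_custom_pool
```
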