msfabricpysdkcore 0.0.9__py3-none-any.whl → 0.0.10__py3-none-any.whl

This diff shows the content changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
@@ -83,6 +83,13 @@ class AdminItem:
     def get_item_access_details(self, type=None):
         """Get the access details of the item
 
+        Returns:
+            dict: The access details of the item"""
+        return self.list_item_access_details(type)
+
+    def list_item_access_details(self, type=None):
+        """Get the access details of the item
+
         Returns:
             dict: The access details of the item"""
 
@@ -70,6 +70,14 @@ class AdminWorkspace:
     def get_workspace_access_details(self):
         """Get the access details of the workspace
 
+        Returns:
+            dict: The access details of the workspace
+        """
+        return self.list_workspace_access_details()
+
+    def list_workspace_access_details(self):
+        """Get the access details of the workspace
+
         Returns:
             dict: The access details of the workspace
         """
@@ -115,6 +123,17 @@ class AdminWorkspace:
         item_dict = json.loads(response.text)
         return AdminItem.from_dict(item_dict, self.auth)
 
+    def list_item_access_details(self, item_id, type=None):
+        """Get the access details of the item
+
+        Args:
+            item_id (str): The ID of the item
+            type (str): The type of the item
+        Returns:
+            dict: The access details of the item
+        """
+        return self.get_item(item_id, type).list_item_access_details()
+
     def get_item_access_details(self, item_id, type=None):
         """Get the access details of the item
 
@@ -124,4 +143,4 @@ class AdminWorkspace:
         Returns:
             dict: The access details of the item
         """
-        return self.get_item(item_id, type).get_item_access_details()
+        return self.list_item_access_details(item_id, type)
@@ -284,13 +284,24 @@ class FabricClientAdmin(FabricClient):
     def get_workspace_access_details(self, workspace_id):
         """Get the access details of the workspace
 
+        Args:
+            workspace_id (str): The ID of the workspace
+        Returns:
+            dict: The access details of the workspace
+        """
+        print("DEPRECATED: Use list_workspace_access_details instead")
+        return self.list_workspace_access_details(workspace_id)
+
+    def list_workspace_access_details(self, workspace_id):
+        """Get the access details of the workspace
+
         Args:
             workspace_id (str): The ID of the workspace
         Returns:
             dict: The access details of the workspace
         """
         ws = self.get_workspace(workspace_id)
-        return ws.get_workspace_access_details()
+        return ws.list_workspace_access_details()
 
     def list_workspaces(self, capacity_id = None, name=None, state=None, type=None, continuationToken = None):
         """List all workspaces
@@ -426,6 +437,15 @@ class FabricClientAdmin(FabricClient):
     def get_tenant_settings(self):
         """Get the tenant settings
 
+        Returns:
+            dict: The tenant settings
+        """
+        print("DEPRECATED: Use list_tenant_settings instead")
+        return self.list_tenant_settings()
+
+    def list_tenant_settings(self):
+        """Get the tenant settings
+
         Returns:
             dict: The tenant settings
         """
@@ -445,7 +465,7 @@ class FabricClientAdmin(FabricClient):
         return json.loads(response.text)
 
 
-    def get_capacities_tenant_settings_overrides(self, continuationToken = None):
+    def list_capacities_tenant_settings_overrides(self, continuationToken = None):
         """Returns list of tenant setting overrides that override at the capacities
 
         Returns:
@@ -471,12 +491,35 @@ class FabricClientAdmin(FabricClient):
         overrides = resp_dict["Overrides"]
 
         if "continuationToken" in resp_dict and resp_dict["continuationToken"] is not None:
-            overrides_next = self.get_capacities_tenant_settings_overrides(continuationToken=resp_dict["continuationToken"])
+            overrides_next = self.list_capacities_tenant_settings_overrides(continuationToken=resp_dict["continuationToken"])
             overrides.extend(overrides_next)
 
         return overrides
+
+    def get_capacities_tenant_settings_overrides(self):
+        """Returns list of tenant setting overrides that override at the capacities
+
+        Returns:
+            list: The capacities tenant settings overrides
+        """
+        print("DEPRECATED: Use list_capacities_tenant_settings_overrides instead")
+        return self.list_capacities_tenant_settings_overrides()
 
-    def get_access_entities(self, user_id, type = None, continuationToken = None):
+
+    def get_access_entities(self, user_id, type = None):
+        """Get the access entities for a user
+
+        Args:
+            user_id (str): The ID of the user
+            type (str): The type of the access entity
+            continuationToken (str): The continuation token
+        Returns:
+            list: The list of access entities
+        """
+        print("DEPRECATED: Use list_access_entities instead")
+        return self.list_access_entities(user_id, type)
+
+    def list_access_entities(self, user_id, type = None, continuationToken = None):
         """Get the access entities for a user
 
         Args:
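
The same rename pattern applies to the tenant-setting and access-entity helpers. A hedged sketch of the new names; the user ID and the `type` filter value are illustrative placeholders, and pagination via `continuationToken` is handled internally by the `list_*` methods:

```python
from msfabricpysdkcore import FabricClientAdmin  # assumes default credential resolution

fca = FabricClientAdmin()

settings = fca.list_tenant_settings()                        # replaces get_tenant_settings()
overrides = fca.list_capacities_tenant_settings_overrides()  # replaces get_capacities_tenant_settings_overrides()
entities = fca.list_access_entities("<user-guid>", type="Notebook")  # replaces get_access_entities()
```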
@@ -515,12 +558,12 @@ class FabricClientAdmin(FabricClient):
         access_entities = resp_dict["accessEntities"]
 
         if "continuationToken" in resp_dict and resp_dict["continuationToken"] is not None:
-            access_entities_next = self.get_access_entities(user_id, type, continuationToken=resp_dict["continuationToken"])
+            access_entities_next = self.list_access_entities(user_id, type, continuationToken=resp_dict["continuationToken"])
             resp_dict["accessEntities"].extend(access_entities_next)
 
         return access_entities
 
-    def get_item_access_details(self, workspace_id, item_id, type=None):
+    def list_item_access_details(self, workspace_id, item_id, type=None):
         """Get the access details of the item
 
         Args:
@@ -532,4 +575,87 @@ class FabricClientAdmin(FabricClient):
         """
         ws = self.get_workspace(workspace_id)
         item = ws.get_item(item_id, type)
-        return item.get_item_access_details(type)
+        return item.list_item_access_details(type)
+
+    def get_item_access_details(self, workspace_id, item_id, type=None):
+        """Get the access details of the item
+
+        Args:
+            workspace_id (str): The ID of the workspace
+            item_id (str): The ID of the item
+            type (str): The type of the item
+        Returns:
+            dict: The access details of the item
+        """
+        print("DEPRECATED: Use list_item_access_details instead")
+        return self.list_item_access_details(workspace_id, item_id, type)
+
+    def bulk_set_labels(self, items, label_id, assignment_method = None, delegated_principal = None):
+        """Set labels in bulk"""
+        # POST https://api.fabric.microsoft.com/v1/admin/items/bulkSetLabels
+
+        url = "https://api.fabric.microsoft.com/v1/admin/items/bulkSetLabels"
+
+        if len(items) > 2000:
+            self.bulk_set_labels(items[2000:], label_id, assignment_method, delegated_principal)
+            items = items[:2000]
+
+        body = {
+            "items": items,
+            "labelId": label_id
+        }
+        if assignment_method:
+            body["assignmentMethod"] = assignment_method
+
+        if delegated_principal:
+            body["delegatedPrincipal"] = delegated_principal
+
+        for _ in range(10):
+            response = requests.post(url=url, headers=self.auth.get_headers(), json=body)
+            if response.status_code == 429:
+                print("Too many requests, waiting 10 seconds")
+                sleep(10)
+                continue
+            if response.status_code not in (200, 429):
+                print(response.status_code)
+                print(response.text)
+                raise Exception(f"Error setting labels: {response.text}")
+            break
+
+        response = json.loads(response.text)
+        return response
+
+
+    def bulk_remove_labels(self, items):
+        """Remove labels in bulk
+        Args:
+            items (list): The list of item IDs
+
+        Returns:
+            dict: The response from the API"""
+        # POST https://api.fabric.microsoft.com/v1/admin/items/bulkRemoveLabels
+
+        url = "https://api.fabric.microsoft.com/v1/admin/items/bulkRemoveLabels"
+
+        if len(items) > 2000:
+            self.bulk_remove_labels(items[2000:])
+            items = items[:2000]
+
+        body = {
+            "items": items
+        }
+
+        for _ in range(10):
+            response = requests.post(url=url, headers=self.auth.get_headers(), json=body)
+            if response.status_code == 429:
+                print("Too many requests, waiting 10 seconds")
+                sleep(10)
+                continue
+            if response.status_code not in (200, 429):
+                print(response.status_code)
+                print(response.text)
+                raise Exception(f"Error removing labels: {response.text}")
+            break
+
+        response = json.loads(response.text)
+        return response
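
The new bulk label endpoints accept up to 2000 items per request; the SDK method splits larger lists recursively, as the diff above shows. A usage sketch, assuming the Fabric bulkSetLabels payload format of item objects with `id` and `type` fields; all IDs are placeholders:

```python
from msfabricpysdkcore import FabricClientAdmin  # assumes default credential resolution

fca = FabricClientAdmin()

items = [
    {"id": "<item-guid-1>", "type": "Lakehouse"},  # placeholder item references
    {"id": "<item-guid-2>", "type": "Notebook"},
]

# Apply a sensitivity label to the items (label_id is a placeholder GUID)
result = fca.bulk_set_labels(items=items, label_id="<label-guid>")

# Remove the labels again
result = fca.bulk_remove_labels(items=items)
```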
@@ -4,6 +4,8 @@ from time import sleep
 
 from msfabricpysdkcore.capacity import Capacity
 from msfabricpysdkcore.client import FabricClient
+from msfabricpysdkcore.deployment_pipeline import DeploymentPipeline
+from msfabricpysdkcore.long_running_operation import LongRunningOperation
 from msfabricpysdkcore.workspace import Workspace
 
 class FabricClientCore(FabricClient):
@@ -311,17 +313,18 @@ class FabricClientCore(FabricClient):
                 return cap
         raise ValueError("No capacity found")
 
-    def list_tables(self, workspace_id, item_id):
-        ws = self.get_workspace_by_id(workspace_id)
-        return ws.list_tables(item_id=item_id)
-
-    def load_table(self, workspace_id, item_id, table_name, path_type, relative_path,
-                   file_extension = None, format_options = None,
-                   mode = None, recursive = None, wait_for_completion = True):
-        ws = self.get_workspace_by_id(workspace_id)
-        return ws.load_table(item_id, table_name, path_type, relative_path,
-                             file_extension, format_options,
-                             mode, recursive, wait_for_completion)
+
+    # long running operations
+
+    def get_operation_results(self, operation_id):
+        """Get the results of an operation"""
+        lro = LongRunningOperation(operation_id=operation_id, auth=self.auth)
+        return lro.get_operation_results()
+
+    def get_operation_state(self, operation_id):
+        """Get the state of an operation"""
+        lro = LongRunningOperation(operation_id=operation_id, auth=self.auth)
+        return lro.get_operation_state()
 
     # list things
 
@@ -347,6 +350,11 @@ class FabricClientCore(FabricClient):
 
     # dataPipelines
 
+    def create_data_pipeline(self, workspace_id, display_name, description = None):
+        """Create a data pipeline in a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.create_data_pipeline(display_name = display_name, description = description)
+
     def list_data_pipelines(self, workspace_id, with_properties = False):
         """List data pipelines in a workspace"""
         ws = self.get_workspace_by_id(workspace_id)
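
The core client now exposes long-running-operation helpers (previous hunk) alongside a `create_data_pipeline` convenience method. A minimal sketch, assuming default authentication; IDs are placeholders, and the shape of the operation-state response is an assumption modeled on the Fabric operations API:

```python
from msfabricpysdkcore import FabricClientCore  # assumes default credential resolution

fc = FabricClientCore()

pipeline = fc.create_data_pipeline("<workspace-guid>", display_name="ingest_sales",
                                   description="demo pipeline")

# Poll a long-running operation by its ID (placeholder) and fetch its result once done
operation_id = "<operation-guid>"
state = fc.get_operation_state(operation_id)   # assumed shape: {"status": "Succeeded", ...}
if state.get("status") == "Succeeded":
    result = fc.get_operation_results(operation_id)
```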
@@ -367,6 +375,84 @@ class FabricClientCore(FabricClient):
         ws = self.get_workspace_by_id(workspace_id)
         return ws.get_data_pipeline(data_pipeline_id).update(display_name=display_name, description=description)
 
+    # environments
+
+    def list_environments(self, workspace_id, with_properties = False):
+        """List environments in a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.list_environments(with_properties = with_properties)
+
+    def create_environment(self, workspace_id, display_name, description = None):
+        """Create an environment in a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.create_environment(display_name = display_name, description = description)
+
+    def get_environment(self, workspace_id, environment_id = None, environment_name = None):
+        """Get an environment from a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_environment(environment_id = environment_id, environment_name = environment_name)
+
+    def delete_environment(self, workspace_id, environment_id):
+        """Delete an environment from a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.delete_environment(environment_id)
+
+    def update_environment(self, workspace_id, environment_id, display_name = None, description = None):
+        """Update an environment in a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.update_environment(environment_id, display_name = display_name, description = description)
+
+    # environmentSparkCompute
+
+    def get_published_settings(self, workspace_id, environment_id):
+        """Get published settings for an environment"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_environment(environment_id).get_published_settings()
+
+    def get_staging_settings(self, workspace_id, environment_id):
+        """Get staging settings for an environment"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_environment(environment_id).get_staging_settings()
+
+    def update_staging_settings(self, workspace_id, environment_id, instance_pool, driver_cores, driver_memory, executor_cores, executor_memory,
+                                dynamic_executor_allocation, spark_properties, runtime_version):
+        """Update staging settings for an environment"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_environment(environment_id).update_staging_settings(instance_pool, driver_cores, driver_memory, executor_cores, executor_memory,
+                                                                          dynamic_executor_allocation, spark_properties, runtime_version)
+
+    # environmentSparkLibraries
+
+    def get_published_libraries(self, workspace_id, environment_id):
+        """Get published libraries for an environment"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_environment(environment_id).get_published_libraries()
+
+    def get_staging_libraries(self, workspace_id, environment_id):
+        """Get staging libraries for an environment"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_environment(environment_id).get_staging_libraries()
+
+    def update_staging_library(self, workspace_id, environment_id):
+        """Update staging libraries for an environment"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_environment(environment_id).update_staging_libraries()
+
+    def publish_environment(self, workspace_id, environment_id):
+        """Publish an environment"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_environment(environment_id).publish_environment()
+
+    def delete_staging_library(self, workspace_id, environment_id, library_to_delete):
+        """Delete a staging library from an environment"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_environment(environment_id).delete_staging_library(library_to_delete)
+
+    def cancel_publish(self, workspace_id, environment_id):
+        """Cancel publishing an environment"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_environment(environment_id).cancel_publish()
+
     # eventstreams
 
     def list_eventstreams(self, workspace_id):
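
The environment methods follow the same delegate-to-workspace pattern as the rest of the client. A sketch of the new workflow, assuming default authentication and that the created environment object exposes an `id` attribute like other items; IDs are placeholders:

```python
from msfabricpysdkcore import FabricClientCore  # assumes default credential resolution

fc = FabricClientCore()
ws_id = "<workspace-guid>"  # placeholder

env = fc.create_environment(ws_id, display_name="dev_env", description="demo environment")

staging = fc.get_staging_settings(ws_id, env.id)   # assumes the environment object has .id
libraries = fc.get_staging_libraries(ws_id, env.id)

fc.publish_environment(ws_id, env.id)              # or fc.cancel_publish(ws_id, env.id)
```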
@@ -465,6 +551,27 @@ class FabricClientCore(FabricClient):
         ws = self.get_workspace_by_id(workspace_id)
         return ws.get_lakehouse(lakehouse_id = lakehouse_id, lakehouse_name = lakehouse_name)
 
+    def list_tables(self, workspace_id, lakehouse_id):
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.list_tables(lakehouse_id=lakehouse_id)
+
+    def load_table(self, workspace_id, lakehouse_id, table_name, path_type, relative_path,
+                   file_extension = None, format_options = None,
+                   mode = None, recursive = None, wait_for_completion = True):
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.load_table(lakehouse_id, table_name, path_type, relative_path,
+                             file_extension, format_options,
+                             mode, recursive, wait_for_completion)
+
+    def run_on_demand_table_maintenance(self, workspace_id, lakehouse_id,
+                                        execution_data = None,
+                                        job_type = "TableMaintenance", wait_for_completion = True):
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.run_on_demand_table_maintenance(lakehouse_id = lakehouse_id,
+                                                  execution_data = execution_data,
+                                                  job_type = job_type,
+                                                  wait_for_completion= wait_for_completion)
+
     # mlExperiments
 
     def list_ml_experiments(self, workspace_id):
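
`list_tables` and `load_table` moved to the lakehouse section and now take a `lakehouse_id` instead of a generic `item_id`, and `run_on_demand_table_maintenance` is new. A sketch with placeholder IDs; the `path_type`/`mode` values and the `execution_data` payload shape are assumptions modeled on the Fabric load table and table maintenance APIs:

```python
from msfabricpysdkcore import FabricClientCore  # assumes default credential resolution

fc = FabricClientCore()
ws_id, lh_id = "<workspace-guid>", "<lakehouse-guid>"  # placeholders

fc.load_table(ws_id, lh_id, table_name="sales",
              path_type="File", relative_path="Files/sales.csv",
              mode="Overwrite", wait_for_completion=True)

print(fc.list_tables(ws_id, lh_id))

# New in 0.0.10: trigger on-demand table maintenance (payload shape assumed)
fc.run_on_demand_table_maintenance(ws_id, lh_id,
                                   execution_data={"tableName": "sales",
                                                   "optimizeSettings": {"vOrder": True}})
```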
@@ -546,6 +653,11 @@ class FabricClientCore(FabricClient):
         ws = self.get_workspace_by_id(workspace_id)
         return ws.update_notebook(notebook_id, display_name = display_name, description = description)
 
+    def get_notebook_definition(self, workspace_id, notebook_id, format = None):
+        """Get the definition of a notebook"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_notebook_definition(notebook_id, format = format)
+
     def update_notebook_definition(self, workspace_id, notebook_id, definition):
         """Update the definition of a notebook"""
         ws = self.get_workspace_by_id(workspace_id)
@@ -573,6 +685,11 @@ class FabricClientCore(FabricClient):
         ws = self.get_workspace_by_id(workspace_id)
         return ws.delete_report(report_id)
 
+    def get_report_definition(self, workspace_id, report_id, format = None):
+        """Get the definition of a report"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_report_definition(report_id, format = format)
+
     def update_report_definition(self, workspace_id, report_id, definition):
         """Update the definition of a report"""
         ws = self.get_workspace_by_id(workspace_id)
@@ -600,11 +717,16 @@ class FabricClientCore(FabricClient):
         ws = self.get_workspace_by_id(workspace_id)
         return ws.delete_semantic_model(semantic_model_id)
 
-    def update_semantic_model(self, workspace_id, semantic_model_id, display_name = None, description = None):
-        """Update a semantic model in a workspace"""
-        ws = self.get_workspace_by_id(workspace_id)
-        return ws.update_semantic_model(semantic_model_id, display_name = display_name, description = description)
+    # def update_semantic_model(self, workspace_id, semantic_model_id, display_name = None, description = None):
+    #     """Update a semantic model in a workspace"""
+    #     ws = self.get_workspace_by_id(workspace_id)
+    #     return ws.update_semantic_model(semantic_model_id, display_name = display_name, description = description)
 
+    def get_semantic_model_definition(self, workspace_id, semantic_model_id, format = None):
+        """Get the definition of a semantic model"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_semantic_model_definition(semantic_model_id, format = format)
+
     def update_semantic_model_definition(self, workspace_id, semantic_model_id, definition):
         """Update the definition of a semantic model"""
         ws = self.get_workspace_by_id(workspace_id)
@@ -637,6 +759,11 @@ class FabricClientCore(FabricClient):
         ws = self.get_workspace_by_id(workspace_id)
         return ws.update_spark_job_definition(spark_job_definition_id, display_name = display_name, description = description)
 
+    def get_spark_job_definition_definition(self, workspace_id, spark_job_definition_id, format = None):
+        """Get the definition of a spark job definition"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_spark_job_definition_definition(spark_job_definition_id, format = format)
+
     def update_spark_job_definition_definition(self, workspace_id, spark_job_definition_id, definition):
         """Update the definition of a spark job definition"""
         ws = self.get_workspace_by_id(workspace_id)
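
Version 0.0.10 adds `get_*_definition` counterparts to the existing `update_*_definition` methods for notebooks, reports, semantic models, and Spark job definitions (while `update_semantic_model` is commented out). A sketch with placeholder IDs; the `format` values are assumptions based on the Fabric item-definition APIs:

```python
from msfabricpysdkcore import FabricClientCore  # assumes default credential resolution

fc = FabricClientCore()
ws_id = "<workspace-guid>"  # placeholder

nb_def = fc.get_notebook_definition(ws_id, "<notebook-guid>", format="ipynb")
rep_def = fc.get_report_definition(ws_id, "<report-guid>")
sm_def = fc.get_semantic_model_definition(ws_id, "<semantic-model-guid>")
sjd_def = fc.get_spark_job_definition_definition(ws_id, "<spark-job-definition-guid>",
                                                 format="SparkJobDefinitionV1")
```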
@@ -668,3 +795,160 @@ class FabricClientCore(FabricClient):
         """Update a warehouse in a workspace"""
         ws = self.get_workspace_by_id(workspace_id)
         return ws.update_warehouse(warehouse_id, display_name = display_name, description = description)
+
+    # spark workspace custom pools
+
+    def list_workspace_custom_pools(self, workspace_id):
+        """List workspace custom pools"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.list_workspace_custom_pools()
+
+    def create_workspace_custom_pool(self, workspace_id, name, node_family, node_size, auto_scale, dynamic_executor_allocation):
+        """Create a workspace custom pool"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.create_workspace_custom_pool(name = name,
+                                               node_family = node_family,
+                                               node_size = node_size,
+                                               auto_scale = auto_scale,
+                                               dynamic_executor_allocation = dynamic_executor_allocation)
+
+
+    def get_workspace_custom_pool(self, workspace_id, pool_id):
+        """Get a workspace custom pool"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_workspace_custom_pool(pool_id)
+
+    def delete_workspace_custom_pool(self, workspace_id, pool_id):
+        """Delete a workspace custom pool"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.delete_workspace_custom_pool(pool_id)
+
+    def update_workspace_custom_pool(self, workspace_id, pool_id, name = None, node_family = None, node_size = None, auto_scale = None, dynamic_executor_allocation = None):
+        """Update a workspace custom pool"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.update_workspace_custom_pool(pool_id, name = name, node_family = node_family, node_size = node_size, auto_scale = auto_scale, dynamic_executor_allocation = dynamic_executor_allocation)
+
+    # Deployment Pipelines
+
+    def deploy_stage_content(self, deployment_pipeline_id, source_stage_id, target_stage_id, created_workspace_details = None,
+                             items = None, note = None, wait_for_completion = True):
+        """Deploy stage content
+        Args:
+            deployment_pipeline_id (str): The ID of the deployment pipeline
+            source_stage_id (str): The ID of the source stage
+            target_stage_id (str): The ID of the target stage
+            created_workspace_details (list): A list of created workspace details
+            items (list): A list of items
+            note (str): A note
+            wait_for_completion (bool): Whether to wait for the deployment to complete
+        Returns:
+            Details about the dpeloyment"""
+
+        pipeline = DeploymentPipeline.get_pipeline(deployment_pipeline_id, auth=self.auth)
+
+        return pipeline.deploy(source_stage_id, target_stage_id, created_workspace_details = created_workspace_details,
+                               items = items, note = note, wait_for_completion = wait_for_completion)
+
+    def list_deployment_pipelines(self, continuationToken = None):
+        """List deployment pipelines
+        Args:
+            continuationToken (str): The continuation token for pagination
+        Returns:
+            list: List of DeploymentPipeline objects
+        """
+        # GET https://api.fabric.microsoft.com/v1/deploymentPipelines
+
+        url = "https://api.fabric.microsoft.com/v1/deploymentPipelines"
+
+        if continuationToken:
+            url = f"{url}?continuationToken={continuationToken}"
+
+        for _ in range(10):
+            response = requests.get(url=url, headers=self.auth.get_headers())
+            if response.status_code == 429:
+                print("Too many requests, waiting 10 seconds")
+                sleep(10)
+                continue
+            if response.status_code not in (200, 429):
+                raise Exception(f"Error listing deployment pipelines: {response.status_code}, {response.text}")
+            break
+
+        resp_dict = json.loads(response.text)
+        items = resp_dict["value"]
+
+        dep_pipes = [DeploymentPipeline.from_dict(i, auth=self.auth) for i in items]
+
+        if "continuationToken" in resp_dict:
+            dep_pipes_next = self.list_deployment_pipelines(continuationToken=resp_dict["continuationToken"])
+            dep_pipes.extend(dep_pipes_next)
+
+        return dep_pipes
+
+    def get_deployment_pipeline(self, pipeline_id):
+        """Get a deployment pipeline by ID
+        Args:
+            pipeline_id (str): The ID of the deployment pipeline
+        Returns:
+            DeploymentPipeline: The deployment pipeline
+        """
+        return DeploymentPipeline.get_pipeline(pipeline_id, auth=self.auth)
+
+    def get_deployment_pipeline_stages_items(self, pipeline_id, stage_id = None, stage_name = None):
+        """Get the items in a deployment stage
+        Args:
+            pipeline_id (str): The ID of the deployment pipeline
+            stage_id (str): The ID of the deployment stage
+            stage_name (str): The name of the deployment stage
+        Returns:
+            list: List of DeploymentStageItem objects
+        """
+        pipeline = DeploymentPipeline.get_pipeline(pipeline_id, auth=self.auth)
+        return pipeline.get_deployment_pipeline_stages_items(stage_id, stage_name)
+
+    def get_deployment_pipeline_stages(self, pipeline_id):
+        """Get the stages of a deployment pipeline
+        Args:
+            pipeline_id (str): The ID of the deployment pipeline
+        Returns:
+            list: List of DeploymentPipelineStage objects
+        """
+        pipeline = self.get_deployment_pipeline(pipeline_id)
+        return pipeline.get_stages()
+
+    def list_deployment_pipeline_stages(self, pipeline_id):
+        """Get the stages of a deployment pipeline
+        Args:
+            pipeline_id (str): The ID of the deployment pipeline
+        Returns:
+            list: List of DeploymentPipelineStage objects
+        """
+        return self.get_deployment_pipeline_stages(pipeline_id)
+
+
+    # Spark workspace settings
+
+    def get_spark_settings(self, workspace_id):
+        """Get spark settings for a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+        Returns:
+            dict: The spark settings"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_spark_settings()
+
+    def update_spark_settings(self, workspace_id, automatic_log = None,
+                              environment = None, high_concurrency = None, pool = None):
+        """Update spark settings for a workspace
+        Args:
+            workspace_id (str): The ID of the workspace
+            automatic_log (bool): Whether to automatically log
+            environment (str): The environment
+            high_concurrency (bool): Whether to use high concurrency
+            pool (str): The pool
+        Returns:
+            dict: The updated spark settings"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.update_spark_settings(automatic_log=automatic_log,
+                                        environment=environment,
+                                        high_concurrency=high_concurrency,
+                                        pool=pool)
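
Finally, a sketch tying together the new custom pool, deployment pipeline, and Spark workspace settings methods. All IDs are placeholders; the `auto_scale`, `dynamic_executor_allocation`, and `automatic_log` payload shapes are assumptions modeled on the corresponding Fabric REST bodies:

```python
from msfabricpysdkcore import FabricClientCore  # assumes default credential resolution

fc = FabricClientCore()
ws_id = "<workspace-guid>"  # placeholder

# Spark custom pool (payload shapes assumed)
pool = fc.create_workspace_custom_pool(
    ws_id, name="demo_pool", node_family="MemoryOptimized", node_size="Small",
    auto_scale={"enabled": True, "minNodeCount": 1, "maxNodeCount": 3},
    dynamic_executor_allocation={"enabled": True, "minExecutors": 1, "maxExecutors": 2})

# Deployment pipelines
pipes = fc.list_deployment_pipelines()
stages = fc.list_deployment_pipeline_stages("<pipeline-guid>")
fc.deploy_stage_content("<pipeline-guid>",
                        source_stage_id="<dev-stage-guid>",
                        target_stage_id="<test-stage-guid>",
                        note="promote to test", wait_for_completion=True)

# Spark workspace settings
settings = fc.get_spark_settings(ws_id)
fc.update_spark_settings(ws_id, automatic_log={"enabled": True})
```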