alibabacloud-emr-serverless-spark20230808 1.9.0__py3-none-any.whl → 1.10.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registries. It is provided for informational purposes only.

This version of alibabacloud-emr-serverless-spark20230808 has been flagged as potentially problematic.

@@ -277,7 +277,7 @@ class Client(OpenApiClient):
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleResponse:
  """
- @summary 创建工作流定义
+ @summary Creates a workflow.

  @param tmp_req: CreateProcessDefinitionWithScheduleRequest
  @param headers: map
@@ -287,6 +287,8 @@ class Client(OpenApiClient):
  UtilClient.validate_model(tmp_req)
  request = emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleShrinkRequest()
  OpenApiUtilClient.convert(tmp_req, request)
+ if not UtilClient.is_unset(tmp_req.global_params):
+ request.global_params_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.global_params, 'globalParams', 'json')
  if not UtilClient.is_unset(tmp_req.schedule):
  request.schedule_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.schedule, 'schedule', 'json')
  if not UtilClient.is_unset(tmp_req.tags):
@@ -302,6 +304,8 @@ class Client(OpenApiClient):
  query['description'] = request.description
  if not UtilClient.is_unset(request.execution_type):
  query['executionType'] = request.execution_type
+ if not UtilClient.is_unset(request.global_params_shrink):
+ query['globalParams'] = request.global_params_shrink
  if not UtilClient.is_unset(request.name):
  query['name'] = request.name
  if not UtilClient.is_unset(request.product_namespace):
@@ -356,7 +360,7 @@ class Client(OpenApiClient):
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleResponse:
  """
- @summary 创建工作流定义
+ @summary Creates a workflow.

  @param tmp_req: CreateProcessDefinitionWithScheduleRequest
  @param headers: map
@@ -366,6 +370,8 @@ class Client(OpenApiClient):
  UtilClient.validate_model(tmp_req)
  request = emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleShrinkRequest()
  OpenApiUtilClient.convert(tmp_req, request)
+ if not UtilClient.is_unset(tmp_req.global_params):
+ request.global_params_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.global_params, 'globalParams', 'json')
  if not UtilClient.is_unset(tmp_req.schedule):
  request.schedule_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.schedule, 'schedule', 'json')
  if not UtilClient.is_unset(tmp_req.tags):
@@ -381,6 +387,8 @@ class Client(OpenApiClient):
  query['description'] = request.description
  if not UtilClient.is_unset(request.execution_type):
  query['executionType'] = request.execution_type
+ if not UtilClient.is_unset(request.global_params_shrink):
+ query['globalParams'] = request.global_params_shrink
  if not UtilClient.is_unset(request.name):
  query['name'] = request.name
  if not UtilClient.is_unset(request.product_namespace):
@@ -433,7 +441,7 @@ class Client(OpenApiClient):
  request: emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleRequest,
  ) -> emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleResponse:
  """
- @summary 创建工作流定义
+ @summary Creates a workflow.

  @param request: CreateProcessDefinitionWithScheduleRequest
  @return: CreateProcessDefinitionWithScheduleResponse
@@ -448,7 +456,7 @@ class Client(OpenApiClient):
  request: emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleRequest,
  ) -> emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleResponse:
  """
- @summary 创建工作流定义
+ @summary Creates a workflow.

  @param request: CreateProcessDefinitionWithScheduleRequest
  @return: CreateProcessDefinitionWithScheduleResponse
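The hunks above add a global_params field to CreateProcessDefinitionWithScheduleRequest; the client JSON-encodes it and sends it as the globalParams query parameter. A minimal usage sketch follows, assuming the usual alibabacloud SDK module layout; credentials, the endpoint, and the element shape of global_params are placeholders, since none of them are shown in this diff.

from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_emr_serverless_spark20230808.client import Client
from alibabacloud_emr_serverless_spark20230808 import models as spark_models

# Placeholder credentials and endpoint; adjust for your account and region.
client = Client(open_api_models.Config(
    access_key_id='<your-access-key-id>',
    access_key_secret='<your-access-key-secret>',
    endpoint='emr-serverless-spark.cn-hangzhou.aliyuncs.com',
))

request = spark_models.CreateProcessDefinitionWithScheduleRequest(
    name='nightly-etl',
    # New in 1.10.1: JSON-encoded into the globalParams query parameter.
    # The element type is not shown in this diff; plain key/value dicts are illustrative only.
    global_params=[{'key': 'dt', 'value': '20250101'}],
)
response = client.create_process_definition_with_schedule('<biz-id>', request)
print(response.body)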
@@ -457,137 +465,821 @@ class Client(OpenApiClient):
457
465
  headers = {}
458
466
  return await self.create_process_definition_with_schedule_with_options_async(biz_id, request, headers, runtime)
459
467
 
468
+ def create_session_cluster_with_options(
469
+ self,
470
+ workspace_id: str,
471
+ request: emr_serverless_spark_20230808_models.CreateSessionClusterRequest,
472
+ headers: Dict[str, str],
473
+ runtime: util_models.RuntimeOptions,
474
+ ) -> emr_serverless_spark_20230808_models.CreateSessionClusterResponse:
475
+ """
476
+ @summary Creates a session.
477
+
478
+ @param request: CreateSessionClusterRequest
479
+ @param headers: map
480
+ @param runtime: runtime options for this request RuntimeOptions
481
+ @return: CreateSessionClusterResponse
482
+ """
483
+ UtilClient.validate_model(request)
484
+ query = {}
485
+ if not UtilClient.is_unset(request.region_id):
486
+ query['regionId'] = request.region_id
487
+ body = {}
488
+ if not UtilClient.is_unset(request.application_configs):
489
+ body['applicationConfigs'] = request.application_configs
490
+ if not UtilClient.is_unset(request.auto_start_configuration):
491
+ body['autoStartConfiguration'] = request.auto_start_configuration
492
+ if not UtilClient.is_unset(request.auto_stop_configuration):
493
+ body['autoStopConfiguration'] = request.auto_stop_configuration
494
+ if not UtilClient.is_unset(request.display_release_version):
495
+ body['displayReleaseVersion'] = request.display_release_version
496
+ if not UtilClient.is_unset(request.env_id):
497
+ body['envId'] = request.env_id
498
+ if not UtilClient.is_unset(request.fusion):
499
+ body['fusion'] = request.fusion
500
+ if not UtilClient.is_unset(request.kind):
501
+ body['kind'] = request.kind
502
+ if not UtilClient.is_unset(request.name):
503
+ body['name'] = request.name
504
+ if not UtilClient.is_unset(request.queue_name):
505
+ body['queueName'] = request.queue_name
506
+ if not UtilClient.is_unset(request.release_version):
507
+ body['releaseVersion'] = request.release_version
508
+ req = open_api_models.OpenApiRequest(
509
+ headers=headers,
510
+ query=OpenApiUtilClient.query(query),
511
+ body=OpenApiUtilClient.parse_to_map(body)
512
+ )
513
+ params = open_api_models.Params(
514
+ action='CreateSessionCluster',
515
+ version='2023-08-08',
516
+ protocol='HTTPS',
517
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters',
518
+ method='POST',
519
+ auth_type='AK',
520
+ style='ROA',
521
+ req_body_type='json',
522
+ body_type='json'
523
+ )
524
+ return TeaCore.from_map(
525
+ emr_serverless_spark_20230808_models.CreateSessionClusterResponse(),
526
+ self.call_api(params, req, runtime)
527
+ )
528
+
529
+ async def create_session_cluster_with_options_async(
530
+ self,
531
+ workspace_id: str,
532
+ request: emr_serverless_spark_20230808_models.CreateSessionClusterRequest,
533
+ headers: Dict[str, str],
534
+ runtime: util_models.RuntimeOptions,
535
+ ) -> emr_serverless_spark_20230808_models.CreateSessionClusterResponse:
536
+ """
537
+ @summary Creates a session.
538
+
539
+ @param request: CreateSessionClusterRequest
540
+ @param headers: map
541
+ @param runtime: runtime options for this request RuntimeOptions
542
+ @return: CreateSessionClusterResponse
543
+ """
544
+ UtilClient.validate_model(request)
545
+ query = {}
546
+ if not UtilClient.is_unset(request.region_id):
547
+ query['regionId'] = request.region_id
548
+ body = {}
549
+ if not UtilClient.is_unset(request.application_configs):
550
+ body['applicationConfigs'] = request.application_configs
551
+ if not UtilClient.is_unset(request.auto_start_configuration):
552
+ body['autoStartConfiguration'] = request.auto_start_configuration
553
+ if not UtilClient.is_unset(request.auto_stop_configuration):
554
+ body['autoStopConfiguration'] = request.auto_stop_configuration
555
+ if not UtilClient.is_unset(request.display_release_version):
556
+ body['displayReleaseVersion'] = request.display_release_version
557
+ if not UtilClient.is_unset(request.env_id):
558
+ body['envId'] = request.env_id
559
+ if not UtilClient.is_unset(request.fusion):
560
+ body['fusion'] = request.fusion
561
+ if not UtilClient.is_unset(request.kind):
562
+ body['kind'] = request.kind
563
+ if not UtilClient.is_unset(request.name):
564
+ body['name'] = request.name
565
+ if not UtilClient.is_unset(request.queue_name):
566
+ body['queueName'] = request.queue_name
567
+ if not UtilClient.is_unset(request.release_version):
568
+ body['releaseVersion'] = request.release_version
569
+ req = open_api_models.OpenApiRequest(
570
+ headers=headers,
571
+ query=OpenApiUtilClient.query(query),
572
+ body=OpenApiUtilClient.parse_to_map(body)
573
+ )
574
+ params = open_api_models.Params(
575
+ action='CreateSessionCluster',
576
+ version='2023-08-08',
577
+ protocol='HTTPS',
578
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters',
579
+ method='POST',
580
+ auth_type='AK',
581
+ style='ROA',
582
+ req_body_type='json',
583
+ body_type='json'
584
+ )
585
+ return TeaCore.from_map(
586
+ emr_serverless_spark_20230808_models.CreateSessionClusterResponse(),
587
+ await self.call_api_async(params, req, runtime)
588
+ )
589
+
590
+ def create_session_cluster(
591
+ self,
592
+ workspace_id: str,
593
+ request: emr_serverless_spark_20230808_models.CreateSessionClusterRequest,
594
+ ) -> emr_serverless_spark_20230808_models.CreateSessionClusterResponse:
595
+ """
596
+ @summary Creates a session.
597
+
598
+ @param request: CreateSessionClusterRequest
599
+ @return: CreateSessionClusterResponse
600
+ """
601
+ runtime = util_models.RuntimeOptions()
602
+ headers = {}
603
+ return self.create_session_cluster_with_options(workspace_id, request, headers, runtime)
604
+
605
+ async def create_session_cluster_async(
606
+ self,
607
+ workspace_id: str,
608
+ request: emr_serverless_spark_20230808_models.CreateSessionClusterRequest,
609
+ ) -> emr_serverless_spark_20230808_models.CreateSessionClusterResponse:
610
+ """
611
+ @summary Creates a session.
612
+
613
+ @param request: CreateSessionClusterRequest
614
+ @return: CreateSessionClusterResponse
615
+ """
616
+ runtime = util_models.RuntimeOptions()
617
+ headers = {}
618
+ return await self.create_session_cluster_with_options_async(workspace_id, request, headers, runtime)
619
+
460
620
  def create_sql_statement_with_options(
461
621
  self,
462
622
  workspace_id: str,
463
- request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
623
+ request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
624
+ headers: Dict[str, str],
625
+ runtime: util_models.RuntimeOptions,
626
+ ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
627
+ """
628
+ @summary Creates an SQL query task.
629
+
630
+ @param request: CreateSqlStatementRequest
631
+ @param headers: map
632
+ @param runtime: runtime options for this request RuntimeOptions
633
+ @return: CreateSqlStatementResponse
634
+ """
635
+ UtilClient.validate_model(request)
636
+ query = {}
637
+ if not UtilClient.is_unset(request.region_id):
638
+ query['regionId'] = request.region_id
639
+ body = {}
640
+ if not UtilClient.is_unset(request.code_content):
641
+ body['codeContent'] = request.code_content
642
+ if not UtilClient.is_unset(request.default_catalog):
643
+ body['defaultCatalog'] = request.default_catalog
644
+ if not UtilClient.is_unset(request.default_database):
645
+ body['defaultDatabase'] = request.default_database
646
+ if not UtilClient.is_unset(request.limit):
647
+ body['limit'] = request.limit
648
+ if not UtilClient.is_unset(request.sql_compute_id):
649
+ body['sqlComputeId'] = request.sql_compute_id
650
+ req = open_api_models.OpenApiRequest(
651
+ headers=headers,
652
+ query=OpenApiUtilClient.query(query),
653
+ body=OpenApiUtilClient.parse_to_map(body)
654
+ )
655
+ params = open_api_models.Params(
656
+ action='CreateSqlStatement',
657
+ version='2023-08-08',
658
+ protocol='HTTPS',
659
+ pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
660
+ method='PUT',
661
+ auth_type='AK',
662
+ style='ROA',
663
+ req_body_type='json',
664
+ body_type='json'
665
+ )
666
+ return TeaCore.from_map(
667
+ emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
668
+ self.call_api(params, req, runtime)
669
+ )
670
+
671
+ async def create_sql_statement_with_options_async(
672
+ self,
673
+ workspace_id: str,
674
+ request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
675
+ headers: Dict[str, str],
676
+ runtime: util_models.RuntimeOptions,
677
+ ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
678
+ """
679
+ @summary Creates an SQL query task.
680
+
681
+ @param request: CreateSqlStatementRequest
682
+ @param headers: map
683
+ @param runtime: runtime options for this request RuntimeOptions
684
+ @return: CreateSqlStatementResponse
685
+ """
686
+ UtilClient.validate_model(request)
687
+ query = {}
688
+ if not UtilClient.is_unset(request.region_id):
689
+ query['regionId'] = request.region_id
690
+ body = {}
691
+ if not UtilClient.is_unset(request.code_content):
692
+ body['codeContent'] = request.code_content
693
+ if not UtilClient.is_unset(request.default_catalog):
694
+ body['defaultCatalog'] = request.default_catalog
695
+ if not UtilClient.is_unset(request.default_database):
696
+ body['defaultDatabase'] = request.default_database
697
+ if not UtilClient.is_unset(request.limit):
698
+ body['limit'] = request.limit
699
+ if not UtilClient.is_unset(request.sql_compute_id):
700
+ body['sqlComputeId'] = request.sql_compute_id
701
+ req = open_api_models.OpenApiRequest(
702
+ headers=headers,
703
+ query=OpenApiUtilClient.query(query),
704
+ body=OpenApiUtilClient.parse_to_map(body)
705
+ )
706
+ params = open_api_models.Params(
707
+ action='CreateSqlStatement',
708
+ version='2023-08-08',
709
+ protocol='HTTPS',
710
+ pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
711
+ method='PUT',
712
+ auth_type='AK',
713
+ style='ROA',
714
+ req_body_type='json',
715
+ body_type='json'
716
+ )
717
+ return TeaCore.from_map(
718
+ emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
719
+ await self.call_api_async(params, req, runtime)
720
+ )
721
+
722
+ def create_sql_statement(
723
+ self,
724
+ workspace_id: str,
725
+ request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
726
+ ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
727
+ """
728
+ @summary Creates an SQL query task.
729
+
730
+ @param request: CreateSqlStatementRequest
731
+ @return: CreateSqlStatementResponse
732
+ """
733
+ runtime = util_models.RuntimeOptions()
734
+ headers = {}
735
+ return self.create_sql_statement_with_options(workspace_id, request, headers, runtime)
736
+
737
+ async def create_sql_statement_async(
738
+ self,
739
+ workspace_id: str,
740
+ request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
741
+ ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
742
+ """
743
+ @summary Creates an SQL query task.
744
+
745
+ @param request: CreateSqlStatementRequest
746
+ @return: CreateSqlStatementResponse
747
+ """
748
+ runtime = util_models.RuntimeOptions()
749
+ headers = {}
750
+ return await self.create_sql_statement_with_options_async(workspace_id, request, headers, runtime)
751
+
752
+ def create_workspace_with_options(
753
+ self,
754
+ request: emr_serverless_spark_20230808_models.CreateWorkspaceRequest,
755
+ headers: Dict[str, str],
756
+ runtime: util_models.RuntimeOptions,
757
+ ) -> emr_serverless_spark_20230808_models.CreateWorkspaceResponse:
758
+ """
759
+ @summary Creates a workspace.
760
+
761
+ @param request: CreateWorkspaceRequest
762
+ @param headers: map
763
+ @param runtime: runtime options for this request RuntimeOptions
764
+ @return: CreateWorkspaceResponse
765
+ """
766
+ UtilClient.validate_model(request)
767
+ query = {}
768
+ if not UtilClient.is_unset(request.region_id):
769
+ query['regionId'] = request.region_id
770
+ body = {}
771
+ if not UtilClient.is_unset(request.auto_renew):
772
+ body['autoRenew'] = request.auto_renew
773
+ if not UtilClient.is_unset(request.auto_renew_period):
774
+ body['autoRenewPeriod'] = request.auto_renew_period
775
+ if not UtilClient.is_unset(request.auto_renew_period_unit):
776
+ body['autoRenewPeriodUnit'] = request.auto_renew_period_unit
777
+ if not UtilClient.is_unset(request.auto_start_session_cluster):
778
+ body['autoStartSessionCluster'] = request.auto_start_session_cluster
779
+ if not UtilClient.is_unset(request.client_token):
780
+ body['clientToken'] = request.client_token
781
+ if not UtilClient.is_unset(request.dlf_catalog_id):
782
+ body['dlfCatalogId'] = request.dlf_catalog_id
783
+ if not UtilClient.is_unset(request.dlf_type):
784
+ body['dlfType'] = request.dlf_type
785
+ if not UtilClient.is_unset(request.duration):
786
+ body['duration'] = request.duration
787
+ if not UtilClient.is_unset(request.oss_bucket):
788
+ body['ossBucket'] = request.oss_bucket
789
+ if not UtilClient.is_unset(request.payment_duration_unit):
790
+ body['paymentDurationUnit'] = request.payment_duration_unit
791
+ if not UtilClient.is_unset(request.payment_type):
792
+ body['paymentType'] = request.payment_type
793
+ if not UtilClient.is_unset(request.ram_role_name):
794
+ body['ramRoleName'] = request.ram_role_name
795
+ if not UtilClient.is_unset(request.release_type):
796
+ body['releaseType'] = request.release_type
797
+ if not UtilClient.is_unset(request.resource_spec):
798
+ body['resourceSpec'] = request.resource_spec
799
+ if not UtilClient.is_unset(request.tag):
800
+ body['tag'] = request.tag
801
+ if not UtilClient.is_unset(request.workspace_name):
802
+ body['workspaceName'] = request.workspace_name
803
+ req = open_api_models.OpenApiRequest(
804
+ headers=headers,
805
+ query=OpenApiUtilClient.query(query),
806
+ body=OpenApiUtilClient.parse_to_map(body)
807
+ )
808
+ params = open_api_models.Params(
809
+ action='CreateWorkspace',
810
+ version='2023-08-08',
811
+ protocol='HTTPS',
812
+ pathname=f'/api/v1/workspaces',
813
+ method='POST',
814
+ auth_type='AK',
815
+ style='ROA',
816
+ req_body_type='json',
817
+ body_type='json'
818
+ )
819
+ return TeaCore.from_map(
820
+ emr_serverless_spark_20230808_models.CreateWorkspaceResponse(),
821
+ self.call_api(params, req, runtime)
822
+ )
823
+
824
+ async def create_workspace_with_options_async(
825
+ self,
826
+ request: emr_serverless_spark_20230808_models.CreateWorkspaceRequest,
827
+ headers: Dict[str, str],
828
+ runtime: util_models.RuntimeOptions,
829
+ ) -> emr_serverless_spark_20230808_models.CreateWorkspaceResponse:
830
+ """
831
+ @summary Creates a workspace.
832
+
833
+ @param request: CreateWorkspaceRequest
834
+ @param headers: map
835
+ @param runtime: runtime options for this request RuntimeOptions
836
+ @return: CreateWorkspaceResponse
837
+ """
838
+ UtilClient.validate_model(request)
839
+ query = {}
840
+ if not UtilClient.is_unset(request.region_id):
841
+ query['regionId'] = request.region_id
842
+ body = {}
843
+ if not UtilClient.is_unset(request.auto_renew):
844
+ body['autoRenew'] = request.auto_renew
845
+ if not UtilClient.is_unset(request.auto_renew_period):
846
+ body['autoRenewPeriod'] = request.auto_renew_period
847
+ if not UtilClient.is_unset(request.auto_renew_period_unit):
848
+ body['autoRenewPeriodUnit'] = request.auto_renew_period_unit
849
+ if not UtilClient.is_unset(request.auto_start_session_cluster):
850
+ body['autoStartSessionCluster'] = request.auto_start_session_cluster
851
+ if not UtilClient.is_unset(request.client_token):
852
+ body['clientToken'] = request.client_token
853
+ if not UtilClient.is_unset(request.dlf_catalog_id):
854
+ body['dlfCatalogId'] = request.dlf_catalog_id
855
+ if not UtilClient.is_unset(request.dlf_type):
856
+ body['dlfType'] = request.dlf_type
857
+ if not UtilClient.is_unset(request.duration):
858
+ body['duration'] = request.duration
859
+ if not UtilClient.is_unset(request.oss_bucket):
860
+ body['ossBucket'] = request.oss_bucket
861
+ if not UtilClient.is_unset(request.payment_duration_unit):
862
+ body['paymentDurationUnit'] = request.payment_duration_unit
863
+ if not UtilClient.is_unset(request.payment_type):
864
+ body['paymentType'] = request.payment_type
865
+ if not UtilClient.is_unset(request.ram_role_name):
866
+ body['ramRoleName'] = request.ram_role_name
867
+ if not UtilClient.is_unset(request.release_type):
868
+ body['releaseType'] = request.release_type
869
+ if not UtilClient.is_unset(request.resource_spec):
870
+ body['resourceSpec'] = request.resource_spec
871
+ if not UtilClient.is_unset(request.tag):
872
+ body['tag'] = request.tag
873
+ if not UtilClient.is_unset(request.workspace_name):
874
+ body['workspaceName'] = request.workspace_name
875
+ req = open_api_models.OpenApiRequest(
876
+ headers=headers,
877
+ query=OpenApiUtilClient.query(query),
878
+ body=OpenApiUtilClient.parse_to_map(body)
879
+ )
880
+ params = open_api_models.Params(
881
+ action='CreateWorkspace',
882
+ version='2023-08-08',
883
+ protocol='HTTPS',
884
+ pathname=f'/api/v1/workspaces',
885
+ method='POST',
886
+ auth_type='AK',
887
+ style='ROA',
888
+ req_body_type='json',
889
+ body_type='json'
890
+ )
891
+ return TeaCore.from_map(
892
+ emr_serverless_spark_20230808_models.CreateWorkspaceResponse(),
893
+ await self.call_api_async(params, req, runtime)
894
+ )
895
+
896
+ def create_workspace(
897
+ self,
898
+ request: emr_serverless_spark_20230808_models.CreateWorkspaceRequest,
899
+ ) -> emr_serverless_spark_20230808_models.CreateWorkspaceResponse:
900
+ """
901
+ @summary Creates a workspace.
902
+
903
+ @param request: CreateWorkspaceRequest
904
+ @return: CreateWorkspaceResponse
905
+ """
906
+ runtime = util_models.RuntimeOptions()
907
+ headers = {}
908
+ return self.create_workspace_with_options(request, headers, runtime)
909
+
910
+ async def create_workspace_async(
911
+ self,
912
+ request: emr_serverless_spark_20230808_models.CreateWorkspaceRequest,
913
+ ) -> emr_serverless_spark_20230808_models.CreateWorkspaceResponse:
914
+ """
915
+ @summary Creates a workspace.
916
+
917
+ @param request: CreateWorkspaceRequest
918
+ @return: CreateWorkspaceResponse
919
+ """
920
+ runtime = util_models.RuntimeOptions()
921
+ headers = {}
922
+ return await self.create_workspace_with_options_async(request, headers, runtime)
923
+
924
+ def edit_workspace_queue_with_options(
925
+ self,
926
+ request: emr_serverless_spark_20230808_models.EditWorkspaceQueueRequest,
927
+ headers: Dict[str, str],
928
+ runtime: util_models.RuntimeOptions,
929
+ ) -> emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse:
930
+ """
931
+ @summary Modifies the queue of a workspace.
932
+
933
+ @param request: EditWorkspaceQueueRequest
934
+ @param headers: map
935
+ @param runtime: runtime options for this request RuntimeOptions
936
+ @return: EditWorkspaceQueueResponse
937
+ """
938
+ UtilClient.validate_model(request)
939
+ query = {}
940
+ if not UtilClient.is_unset(request.region_id):
941
+ query['regionId'] = request.region_id
942
+ body = {}
943
+ if not UtilClient.is_unset(request.environments):
944
+ body['environments'] = request.environments
945
+ if not UtilClient.is_unset(request.resource_spec):
946
+ body['resourceSpec'] = request.resource_spec
947
+ if not UtilClient.is_unset(request.workspace_id):
948
+ body['workspaceId'] = request.workspace_id
949
+ if not UtilClient.is_unset(request.workspace_queue_name):
950
+ body['workspaceQueueName'] = request.workspace_queue_name
951
+ req = open_api_models.OpenApiRequest(
952
+ headers=headers,
953
+ query=OpenApiUtilClient.query(query),
954
+ body=OpenApiUtilClient.parse_to_map(body)
955
+ )
956
+ params = open_api_models.Params(
957
+ action='EditWorkspaceQueue',
958
+ version='2023-08-08',
959
+ protocol='HTTPS',
960
+ pathname=f'/api/v1/workspaces/queues/action/edit',
961
+ method='POST',
962
+ auth_type='AK',
963
+ style='ROA',
964
+ req_body_type='json',
965
+ body_type='json'
966
+ )
967
+ return TeaCore.from_map(
968
+ emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse(),
969
+ self.call_api(params, req, runtime)
970
+ )
971
+
972
+ async def edit_workspace_queue_with_options_async(
973
+ self,
974
+ request: emr_serverless_spark_20230808_models.EditWorkspaceQueueRequest,
975
+ headers: Dict[str, str],
976
+ runtime: util_models.RuntimeOptions,
977
+ ) -> emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse:
978
+ """
979
+ @summary Modifies the queue of a workspace.
980
+
981
+ @param request: EditWorkspaceQueueRequest
982
+ @param headers: map
983
+ @param runtime: runtime options for this request RuntimeOptions
984
+ @return: EditWorkspaceQueueResponse
985
+ """
986
+ UtilClient.validate_model(request)
987
+ query = {}
988
+ if not UtilClient.is_unset(request.region_id):
989
+ query['regionId'] = request.region_id
990
+ body = {}
991
+ if not UtilClient.is_unset(request.environments):
992
+ body['environments'] = request.environments
993
+ if not UtilClient.is_unset(request.resource_spec):
994
+ body['resourceSpec'] = request.resource_spec
995
+ if not UtilClient.is_unset(request.workspace_id):
996
+ body['workspaceId'] = request.workspace_id
997
+ if not UtilClient.is_unset(request.workspace_queue_name):
998
+ body['workspaceQueueName'] = request.workspace_queue_name
999
+ req = open_api_models.OpenApiRequest(
1000
+ headers=headers,
1001
+ query=OpenApiUtilClient.query(query),
1002
+ body=OpenApiUtilClient.parse_to_map(body)
1003
+ )
1004
+ params = open_api_models.Params(
1005
+ action='EditWorkspaceQueue',
1006
+ version='2023-08-08',
1007
+ protocol='HTTPS',
1008
+ pathname=f'/api/v1/workspaces/queues/action/edit',
1009
+ method='POST',
1010
+ auth_type='AK',
1011
+ style='ROA',
1012
+ req_body_type='json',
1013
+ body_type='json'
1014
+ )
1015
+ return TeaCore.from_map(
1016
+ emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse(),
1017
+ await self.call_api_async(params, req, runtime)
1018
+ )
1019
+
1020
+ def edit_workspace_queue(
1021
+ self,
1022
+ request: emr_serverless_spark_20230808_models.EditWorkspaceQueueRequest,
1023
+ ) -> emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse:
1024
+ """
1025
+ @summary Modifies the queue of a workspace.
1026
+
1027
+ @param request: EditWorkspaceQueueRequest
1028
+ @return: EditWorkspaceQueueResponse
1029
+ """
1030
+ runtime = util_models.RuntimeOptions()
1031
+ headers = {}
1032
+ return self.edit_workspace_queue_with_options(request, headers, runtime)
1033
+
1034
+ async def edit_workspace_queue_async(
1035
+ self,
1036
+ request: emr_serverless_spark_20230808_models.EditWorkspaceQueueRequest,
1037
+ ) -> emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse:
1038
+ """
1039
+ @summary Modifies the queue of a workspace.
1040
+
1041
+ @param request: EditWorkspaceQueueRequest
1042
+ @return: EditWorkspaceQueueResponse
1043
+ """
1044
+ runtime = util_models.RuntimeOptions()
1045
+ headers = {}
1046
+ return await self.edit_workspace_queue_with_options_async(request, headers, runtime)
1047
+
1048
+ def get_cu_hours_with_options(
1049
+ self,
1050
+ workspace_id: str,
1051
+ queue: str,
1052
+ request: emr_serverless_spark_20230808_models.GetCuHoursRequest,
464
1053
  headers: Dict[str, str],
465
1054
  runtime: util_models.RuntimeOptions,
466
- ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
1055
+ ) -> emr_serverless_spark_20230808_models.GetCuHoursResponse:
467
1056
  """
468
- @summary Creates an SQL query task.
1057
+ @summary Queries the number of CU-hours consumed by a queue during a specified cycle.
469
1058
 
470
- @param request: CreateSqlStatementRequest
1059
+ @param request: GetCuHoursRequest
471
1060
  @param headers: map
472
1061
  @param runtime: runtime options for this request RuntimeOptions
473
- @return: CreateSqlStatementResponse
1062
+ @return: GetCuHoursResponse
1063
+ """
1064
+ UtilClient.validate_model(request)
1065
+ query = {}
1066
+ if not UtilClient.is_unset(request.end_time):
1067
+ query['endTime'] = request.end_time
1068
+ if not UtilClient.is_unset(request.start_time):
1069
+ query['startTime'] = request.start_time
1070
+ req = open_api_models.OpenApiRequest(
1071
+ headers=headers,
1072
+ query=OpenApiUtilClient.query(query)
1073
+ )
1074
+ params = open_api_models.Params(
1075
+ action='GetCuHours',
1076
+ version='2023-08-08',
1077
+ protocol='HTTPS',
1078
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/metric/cuHours/{OpenApiUtilClient.get_encode_param(queue)}',
1079
+ method='GET',
1080
+ auth_type='AK',
1081
+ style='ROA',
1082
+ req_body_type='json',
1083
+ body_type='json'
1084
+ )
1085
+ return TeaCore.from_map(
1086
+ emr_serverless_spark_20230808_models.GetCuHoursResponse(),
1087
+ self.call_api(params, req, runtime)
1088
+ )
1089
+
1090
+ async def get_cu_hours_with_options_async(
1091
+ self,
1092
+ workspace_id: str,
1093
+ queue: str,
1094
+ request: emr_serverless_spark_20230808_models.GetCuHoursRequest,
1095
+ headers: Dict[str, str],
1096
+ runtime: util_models.RuntimeOptions,
1097
+ ) -> emr_serverless_spark_20230808_models.GetCuHoursResponse:
1098
+ """
1099
+ @summary Queries the number of CU-hours consumed by a queue during a specified cycle.
1100
+
1101
+ @param request: GetCuHoursRequest
1102
+ @param headers: map
1103
+ @param runtime: runtime options for this request RuntimeOptions
1104
+ @return: GetCuHoursResponse
1105
+ """
1106
+ UtilClient.validate_model(request)
1107
+ query = {}
1108
+ if not UtilClient.is_unset(request.end_time):
1109
+ query['endTime'] = request.end_time
1110
+ if not UtilClient.is_unset(request.start_time):
1111
+ query['startTime'] = request.start_time
1112
+ req = open_api_models.OpenApiRequest(
1113
+ headers=headers,
1114
+ query=OpenApiUtilClient.query(query)
1115
+ )
1116
+ params = open_api_models.Params(
1117
+ action='GetCuHours',
1118
+ version='2023-08-08',
1119
+ protocol='HTTPS',
1120
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/metric/cuHours/{OpenApiUtilClient.get_encode_param(queue)}',
1121
+ method='GET',
1122
+ auth_type='AK',
1123
+ style='ROA',
1124
+ req_body_type='json',
1125
+ body_type='json'
1126
+ )
1127
+ return TeaCore.from_map(
1128
+ emr_serverless_spark_20230808_models.GetCuHoursResponse(),
1129
+ await self.call_api_async(params, req, runtime)
1130
+ )
1131
+
1132
+ def get_cu_hours(
1133
+ self,
1134
+ workspace_id: str,
1135
+ queue: str,
1136
+ request: emr_serverless_spark_20230808_models.GetCuHoursRequest,
1137
+ ) -> emr_serverless_spark_20230808_models.GetCuHoursResponse:
1138
+ """
1139
+ @summary Queries the number of CU-hours consumed by a queue during a specified cycle.
1140
+
1141
+ @param request: GetCuHoursRequest
1142
+ @return: GetCuHoursResponse
1143
+ """
1144
+ runtime = util_models.RuntimeOptions()
1145
+ headers = {}
1146
+ return self.get_cu_hours_with_options(workspace_id, queue, request, headers, runtime)
1147
+
1148
+ async def get_cu_hours_async(
1149
+ self,
1150
+ workspace_id: str,
1151
+ queue: str,
1152
+ request: emr_serverless_spark_20230808_models.GetCuHoursRequest,
1153
+ ) -> emr_serverless_spark_20230808_models.GetCuHoursResponse:
1154
+ """
1155
+ @summary Queries the number of CU-hours consumed by a queue during a specified cycle.
1156
+
1157
+ @param request: GetCuHoursRequest
1158
+ @return: GetCuHoursResponse
1159
+ """
1160
+ runtime = util_models.RuntimeOptions()
1161
+ headers = {}
1162
+ return await self.get_cu_hours_with_options_async(workspace_id, queue, request, headers, runtime)
1163
+
1164
+ def get_doctor_application_with_options(
1165
+ self,
1166
+ workspace_id: str,
1167
+ run_id: str,
1168
+ request: emr_serverless_spark_20230808_models.GetDoctorApplicationRequest,
1169
+ headers: Dict[str, str],
1170
+ runtime: util_models.RuntimeOptions,
1171
+ ) -> emr_serverless_spark_20230808_models.GetDoctorApplicationResponse:
1172
+ """
1173
+ @summary Obtains job analysis information on E-MapReduce (EMR) Doctor.
1174
+
1175
+ @param request: GetDoctorApplicationRequest
1176
+ @param headers: map
1177
+ @param runtime: runtime options for this request RuntimeOptions
1178
+ @return: GetDoctorApplicationResponse
474
1179
  """
475
1180
  UtilClient.validate_model(request)
476
1181
  query = {}
1182
+ if not UtilClient.is_unset(request.locale):
1183
+ query['locale'] = request.locale
1184
+ if not UtilClient.is_unset(request.query_time):
1185
+ query['queryTime'] = request.query_time
477
1186
  if not UtilClient.is_unset(request.region_id):
478
1187
  query['regionId'] = request.region_id
479
- body = {}
480
- if not UtilClient.is_unset(request.code_content):
481
- body['codeContent'] = request.code_content
482
- if not UtilClient.is_unset(request.default_catalog):
483
- body['defaultCatalog'] = request.default_catalog
484
- if not UtilClient.is_unset(request.default_database):
485
- body['defaultDatabase'] = request.default_database
486
- if not UtilClient.is_unset(request.limit):
487
- body['limit'] = request.limit
488
- if not UtilClient.is_unset(request.sql_compute_id):
489
- body['sqlComputeId'] = request.sql_compute_id
490
1188
  req = open_api_models.OpenApiRequest(
491
1189
  headers=headers,
492
- query=OpenApiUtilClient.query(query),
493
- body=OpenApiUtilClient.parse_to_map(body)
1190
+ query=OpenApiUtilClient.query(query)
494
1191
  )
495
1192
  params = open_api_models.Params(
496
- action='CreateSqlStatement',
1193
+ action='GetDoctorApplication',
497
1194
  version='2023-08-08',
498
1195
  protocol='HTTPS',
499
- pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
500
- method='PUT',
1196
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/runs/{OpenApiUtilClient.get_encode_param(run_id)}/action/getDoctorApplication',
1197
+ method='GET',
501
1198
  auth_type='AK',
502
1199
  style='ROA',
503
1200
  req_body_type='json',
504
1201
  body_type='json'
505
1202
  )
506
1203
  return TeaCore.from_map(
507
- emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
1204
+ emr_serverless_spark_20230808_models.GetDoctorApplicationResponse(),
508
1205
  self.call_api(params, req, runtime)
509
1206
  )
510
1207
 
511
- async def create_sql_statement_with_options_async(
1208
+ async def get_doctor_application_with_options_async(
512
1209
  self,
513
1210
  workspace_id: str,
514
- request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
1211
+ run_id: str,
1212
+ request: emr_serverless_spark_20230808_models.GetDoctorApplicationRequest,
515
1213
  headers: Dict[str, str],
516
1214
  runtime: util_models.RuntimeOptions,
517
- ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
1215
+ ) -> emr_serverless_spark_20230808_models.GetDoctorApplicationResponse:
518
1216
  """
519
- @summary Creates an SQL query task.
1217
+ @summary Obtains job analysis information on E-MapReduce (EMR) Doctor.
520
1218
 
521
- @param request: CreateSqlStatementRequest
1219
+ @param request: GetDoctorApplicationRequest
522
1220
  @param headers: map
523
1221
  @param runtime: runtime options for this request RuntimeOptions
524
- @return: CreateSqlStatementResponse
1222
+ @return: GetDoctorApplicationResponse
525
1223
  """
526
1224
  UtilClient.validate_model(request)
527
1225
  query = {}
1226
+ if not UtilClient.is_unset(request.locale):
1227
+ query['locale'] = request.locale
1228
+ if not UtilClient.is_unset(request.query_time):
1229
+ query['queryTime'] = request.query_time
528
1230
  if not UtilClient.is_unset(request.region_id):
529
1231
  query['regionId'] = request.region_id
530
- body = {}
531
- if not UtilClient.is_unset(request.code_content):
532
- body['codeContent'] = request.code_content
533
- if not UtilClient.is_unset(request.default_catalog):
534
- body['defaultCatalog'] = request.default_catalog
535
- if not UtilClient.is_unset(request.default_database):
536
- body['defaultDatabase'] = request.default_database
537
- if not UtilClient.is_unset(request.limit):
538
- body['limit'] = request.limit
539
- if not UtilClient.is_unset(request.sql_compute_id):
540
- body['sqlComputeId'] = request.sql_compute_id
541
1232
  req = open_api_models.OpenApiRequest(
542
1233
  headers=headers,
543
- query=OpenApiUtilClient.query(query),
544
- body=OpenApiUtilClient.parse_to_map(body)
1234
+ query=OpenApiUtilClient.query(query)
545
1235
  )
546
1236
  params = open_api_models.Params(
547
- action='CreateSqlStatement',
1237
+ action='GetDoctorApplication',
548
1238
  version='2023-08-08',
549
1239
  protocol='HTTPS',
550
- pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
551
- method='PUT',
1240
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/runs/{OpenApiUtilClient.get_encode_param(run_id)}/action/getDoctorApplication',
1241
+ method='GET',
552
1242
  auth_type='AK',
553
1243
  style='ROA',
554
1244
  req_body_type='json',
555
1245
  body_type='json'
556
1246
  )
557
1247
  return TeaCore.from_map(
558
- emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
1248
+ emr_serverless_spark_20230808_models.GetDoctorApplicationResponse(),
559
1249
  await self.call_api_async(params, req, runtime)
560
1250
  )
561
1251
 
562
- def create_sql_statement(
1252
+ def get_doctor_application(
563
1253
  self,
564
1254
  workspace_id: str,
565
- request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
566
- ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
1255
+ run_id: str,
1256
+ request: emr_serverless_spark_20230808_models.GetDoctorApplicationRequest,
1257
+ ) -> emr_serverless_spark_20230808_models.GetDoctorApplicationResponse:
567
1258
  """
568
- @summary Creates an SQL query task.
1259
+ @summary Obtains job analysis information on E-MapReduce (EMR) Doctor.
569
1260
 
570
- @param request: CreateSqlStatementRequest
571
- @return: CreateSqlStatementResponse
1261
+ @param request: GetDoctorApplicationRequest
1262
+ @return: GetDoctorApplicationResponse
572
1263
  """
573
1264
  runtime = util_models.RuntimeOptions()
574
1265
  headers = {}
575
- return self.create_sql_statement_with_options(workspace_id, request, headers, runtime)
1266
+ return self.get_doctor_application_with_options(workspace_id, run_id, request, headers, runtime)
576
1267
 
577
- async def create_sql_statement_async(
1268
+ async def get_doctor_application_async(
578
1269
  self,
579
1270
  workspace_id: str,
580
- request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
581
- ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
1271
+ run_id: str,
1272
+ request: emr_serverless_spark_20230808_models.GetDoctorApplicationRequest,
1273
+ ) -> emr_serverless_spark_20230808_models.GetDoctorApplicationResponse:
582
1274
  """
583
- @summary Creates an SQL query task.
1275
+ @summary Obtains job analysis information on E-MapReduce (EMR) Doctor.
584
1276
 
585
- @param request: CreateSqlStatementRequest
586
- @return: CreateSqlStatementResponse
1277
+ @param request: GetDoctorApplicationRequest
1278
+ @return: GetDoctorApplicationResponse
587
1279
  """
588
1280
  runtime = util_models.RuntimeOptions()
589
1281
  headers = {}
590
- return await self.create_sql_statement_with_options_async(workspace_id, request, headers, runtime)
1282
+ return await self.get_doctor_application_with_options_async(workspace_id, run_id, request, headers, runtime)
591
1283
 
592
1284
  def get_job_run_with_options(
593
1285
  self,
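The large hunk above introduces several new client methods, including create_session_cluster, create_sql_statement, create_workspace, edit_workspace_queue, get_cu_hours, and get_doctor_application, each in sync and async variants. A hedged sketch of two of them, reusing the client and spark_models from the earlier example; the kind and release_version values are placeholders, and the GetCuHours time range is assumed to be epoch milliseconds.

# `client` and `spark_models` are set up as in the earlier sketch.
session_req = spark_models.CreateSessionClusterRequest(
    name='adhoc-sql-session',
    kind='SQL',                   # assumed value; allowed kinds are not listed in this diff
    queue_name='dev_queue',
    release_version='<release>',  # placeholder release version
)
session = client.create_session_cluster('<workspace-id>', session_req)

cu_req = spark_models.GetCuHoursRequest(start_time=1735689600000, end_time=1738368000000)
cu_hours = client.get_cu_hours('<workspace-id>', 'dev_queue', cu_req)
print(cu_hours.body)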
@@ -710,7 +1402,7 @@ class Client(OpenApiClient):
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.GetSessionClusterResponse:
  """
- @summary Queries a list of sessions.
+ @summary Queries the information about a session.

  @param request: GetSessionClusterRequest
  @param headers: map
@@ -750,7 +1442,7 @@ class Client(OpenApiClient):
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.GetSessionClusterResponse:
  """
- @summary Queries a list of sessions.
+ @summary Queries the information about a session.

  @param request: GetSessionClusterRequest
  @param headers: map
@@ -788,7 +1480,7 @@ class Client(OpenApiClient):
  request: emr_serverless_spark_20230808_models.GetSessionClusterRequest,
  ) -> emr_serverless_spark_20230808_models.GetSessionClusterResponse:
  """
- @summary Queries a list of sessions.
+ @summary Queries the information about a session.

  @param request: GetSessionClusterRequest
  @return: GetSessionClusterResponse
@@ -804,7 +1496,7 @@ class Client(OpenApiClient):
  request: emr_serverless_spark_20230808_models.GetSessionClusterRequest,
  ) -> emr_serverless_spark_20230808_models.GetSessionClusterResponse:
  """
- @summary Queries a list of sessions.
+ @summary Queries the information about a session.

  @param request: GetSessionClusterRequest
  @return: GetSessionClusterResponse
@@ -1190,6 +1882,8 @@ class Client(OpenApiClient):
  query['jobRunId'] = request.job_run_id
  if not UtilClient.is_unset(request.max_results):
  query['maxResults'] = request.max_results
+ if not UtilClient.is_unset(request.min_duration):
+ query['minDuration'] = request.min_duration
  if not UtilClient.is_unset(request.name):
  query['name'] = request.name
  if not UtilClient.is_unset(request.next_token):
@@ -1261,6 +1955,8 @@ class Client(OpenApiClient):
  query['jobRunId'] = request.job_run_id
  if not UtilClient.is_unset(request.max_results):
  query['maxResults'] = request.max_results
+ if not UtilClient.is_unset(request.min_duration):
+ query['minDuration'] = request.min_duration
  if not UtilClient.is_unset(request.name):
  query['name'] = request.name
  if not UtilClient.is_unset(request.next_token):
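These two hunks add a min_duration filter to ListJobRunsRequest, passed through as the minDuration query parameter. A sketch of the filter in use, reusing the earlier client; the duration unit is not stated in this diff, so milliseconds is an assumption.

# List job runs that ran for at least five minutes (unit assumed to be milliseconds).
runs_req = spark_models.ListJobRunsRequest(
    min_duration=5 * 60 * 1000,  # new in 1.10.1
    max_results=50,
)
runs = client.list_job_runs('<workspace-id>', runs_req)
print(runs.body)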
@@ -1325,6 +2021,142 @@ class Client(OpenApiClient):
1325
2021
  headers = {}
1326
2022
  return await self.list_job_runs_with_options_async(workspace_id, request, headers, runtime)
1327
2023
 
2024
+ def list_kyuubi_spark_applications_with_options(
2025
+ self,
2026
+ workspace_id: str,
2027
+ kyuubi_service_id: str,
2028
+ tmp_req: emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsRequest,
2029
+ headers: Dict[str, str],
2030
+ runtime: util_models.RuntimeOptions,
2031
+ ) -> emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse:
2032
+ """
2033
+ @summary Queries the applications that are submitted by using a Kyuubi gateway.
2034
+
2035
+ @param tmp_req: ListKyuubiSparkApplicationsRequest
2036
+ @param headers: map
2037
+ @param runtime: runtime options for this request RuntimeOptions
2038
+ @return: ListKyuubiSparkApplicationsResponse
2039
+ """
2040
+ UtilClient.validate_model(tmp_req)
2041
+ request = emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsShrinkRequest()
2042
+ OpenApiUtilClient.convert(tmp_req, request)
2043
+ if not UtilClient.is_unset(tmp_req.start_time):
2044
+ request.start_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.start_time, 'startTime', 'json')
2045
+ query = {}
2046
+ if not UtilClient.is_unset(request.application_id):
2047
+ query['applicationId'] = request.application_id
2048
+ if not UtilClient.is_unset(request.application_name):
2049
+ query['applicationName'] = request.application_name
2050
+ if not UtilClient.is_unset(request.max_results):
2051
+ query['maxResults'] = request.max_results
2052
+ if not UtilClient.is_unset(request.next_token):
2053
+ query['nextToken'] = request.next_token
2054
+ if not UtilClient.is_unset(request.start_time_shrink):
2055
+ query['startTime'] = request.start_time_shrink
2056
+ req = open_api_models.OpenApiRequest(
2057
+ headers=headers,
2058
+ query=OpenApiUtilClient.query(query)
2059
+ )
2060
+ params = open_api_models.Params(
2061
+ action='ListKyuubiSparkApplications',
2062
+ version='2023-08-08',
2063
+ protocol='HTTPS',
2064
+ pathname=f'/api/v1/kyuubi/{OpenApiUtilClient.get_encode_param(workspace_id)}/{OpenApiUtilClient.get_encode_param(kyuubi_service_id)}/applications',
2065
+ method='GET',
2066
+ auth_type='AK',
2067
+ style='ROA',
2068
+ req_body_type='json',
2069
+ body_type='json'
2070
+ )
2071
+ return TeaCore.from_map(
2072
+ emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse(),
2073
+ self.call_api(params, req, runtime)
2074
+ )
2075
+
2076
+ async def list_kyuubi_spark_applications_with_options_async(
2077
+ self,
2078
+ workspace_id: str,
2079
+ kyuubi_service_id: str,
2080
+ tmp_req: emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsRequest,
2081
+ headers: Dict[str, str],
2082
+ runtime: util_models.RuntimeOptions,
2083
+ ) -> emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse:
2084
+ """
2085
+ @summary Queries the applications that are submitted by using a Kyuubi gateway.
2086
+
2087
+ @param tmp_req: ListKyuubiSparkApplicationsRequest
2088
+ @param headers: map
2089
+ @param runtime: runtime options for this request RuntimeOptions
2090
+ @return: ListKyuubiSparkApplicationsResponse
2091
+ """
2092
+ UtilClient.validate_model(tmp_req)
2093
+ request = emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsShrinkRequest()
2094
+ OpenApiUtilClient.convert(tmp_req, request)
2095
+ if not UtilClient.is_unset(tmp_req.start_time):
2096
+ request.start_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.start_time, 'startTime', 'json')
2097
+ query = {}
2098
+ if not UtilClient.is_unset(request.application_id):
2099
+ query['applicationId'] = request.application_id
2100
+ if not UtilClient.is_unset(request.application_name):
2101
+ query['applicationName'] = request.application_name
2102
+ if not UtilClient.is_unset(request.max_results):
2103
+ query['maxResults'] = request.max_results
2104
+ if not UtilClient.is_unset(request.next_token):
2105
+ query['nextToken'] = request.next_token
2106
+ if not UtilClient.is_unset(request.start_time_shrink):
2107
+ query['startTime'] = request.start_time_shrink
2108
+ req = open_api_models.OpenApiRequest(
2109
+ headers=headers,
2110
+ query=OpenApiUtilClient.query(query)
2111
+ )
2112
+ params = open_api_models.Params(
2113
+ action='ListKyuubiSparkApplications',
2114
+ version='2023-08-08',
2115
+ protocol='HTTPS',
2116
+ pathname=f'/api/v1/kyuubi/{OpenApiUtilClient.get_encode_param(workspace_id)}/{OpenApiUtilClient.get_encode_param(kyuubi_service_id)}/applications',
2117
+ method='GET',
2118
+ auth_type='AK',
2119
+ style='ROA',
2120
+ req_body_type='json',
2121
+ body_type='json'
2122
+ )
2123
+ return TeaCore.from_map(
2124
+ emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse(),
2125
+ await self.call_api_async(params, req, runtime)
2126
+ )
2127
+
2128
+ def list_kyuubi_spark_applications(
2129
+ self,
2130
+ workspace_id: str,
2131
+ kyuubi_service_id: str,
2132
+ request: emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsRequest,
2133
+ ) -> emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse:
2134
+ """
2135
+ @summary Queries the applications that are submitted by using a Kyuubi gateway.
2136
+
2137
+ @param request: ListKyuubiSparkApplicationsRequest
2138
+ @return: ListKyuubiSparkApplicationsResponse
2139
+ """
2140
+ runtime = util_models.RuntimeOptions()
2141
+ headers = {}
2142
+ return self.list_kyuubi_spark_applications_with_options(workspace_id, kyuubi_service_id, request, headers, runtime)
2143
+
2144
+ async def list_kyuubi_spark_applications_async(
2145
+ self,
2146
+ workspace_id: str,
2147
+ kyuubi_service_id: str,
2148
+ request: emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsRequest,
2149
+ ) -> emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse:
2150
+ """
2151
+ @summary Queries the applications that are submitted by using a Kyuubi gateway.
2152
+
2153
+ @param request: ListKyuubiSparkApplicationsRequest
2154
+ @return: ListKyuubiSparkApplicationsResponse
2155
+ """
2156
+ runtime = util_models.RuntimeOptions()
2157
+ headers = {}
2158
+ return await self.list_kyuubi_spark_applications_with_options_async(workspace_id, kyuubi_service_id, request, headers, runtime)
2159
+
1328
2160
  def list_log_contents_with_options(
1329
2161
  self,
1330
2162
  workspace_id: str,
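The hunk above adds ListKyuubiSparkApplications, which pages through the Spark applications submitted through a Kyuubi gateway in a workspace. A minimal sketch, reusing the earlier client; the Kyuubi service ID is a placeholder.

kyuubi_req = spark_models.ListKyuubiSparkApplicationsRequest(max_results=20)
apps = client.list_kyuubi_spark_applications('<workspace-id>', '<kyuubi-service-id>', kyuubi_req)
print(apps.body)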
@@ -1333,7 +2165,7 @@ class Client(OpenApiClient):
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
  """
- @summary Obtains the log content.
+ @summary Get Log Content

  @param request: ListLogContentsRequest
  @param headers: map
@@ -1378,7 +2210,7 @@ class Client(OpenApiClient):
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
  """
- @summary Obtains the log content.
+ @summary Get Log Content

  @param request: ListLogContentsRequest
  @param headers: map
@@ -1421,7 +2253,7 @@ class Client(OpenApiClient):
  request: emr_serverless_spark_20230808_models.ListLogContentsRequest,
  ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
  """
- @summary Obtains the log content.
+ @summary Get Log Content

  @param request: ListLogContentsRequest
  @return: ListLogContentsResponse
@@ -1436,7 +2268,7 @@ class Client(OpenApiClient):
  request: emr_serverless_spark_20230808_models.ListLogContentsRequest,
  ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
  """
- @summary Obtains the log content.
+ @summary Get Log Content

  @param request: ListLogContentsRequest
  @return: ListLogContentsResponse
@@ -1469,6 +2301,8 @@ class Client(OpenApiClient):
  query['releaseVersion'] = request.release_version
  if not UtilClient.is_unset(request.release_version_status):
  query['releaseVersionStatus'] = request.release_version_status
+ if not UtilClient.is_unset(request.service_filter):
+ query['serviceFilter'] = request.service_filter
  if not UtilClient.is_unset(request.workspace_id):
  query['workspaceId'] = request.workspace_id
  req = open_api_models.OpenApiRequest(
@@ -1515,6 +2349,8 @@ class Client(OpenApiClient):
  query['releaseVersion'] = request.release_version
  if not UtilClient.is_unset(request.release_version_status):
  query['releaseVersionStatus'] = request.release_version_status
+ if not UtilClient.is_unset(request.service_filter):
+ query['serviceFilter'] = request.service_filter
  if not UtilClient.is_unset(request.workspace_id):
  query['workspaceId'] = request.workspace_id
  req = open_api_models.OpenApiRequest(
@@ -1807,19 +2643,23 @@ class Client(OpenApiClient):

  def list_workspaces_with_options(
  self,
- request: emr_serverless_spark_20230808_models.ListWorkspacesRequest,
+ tmp_req: emr_serverless_spark_20230808_models.ListWorkspacesRequest,
  headers: Dict[str, str],
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.ListWorkspacesResponse:
  """
  @summary Queries a list of workspaces.

- @param request: ListWorkspacesRequest
+ @param tmp_req: ListWorkspacesRequest
  @param headers: map
  @param runtime: runtime options for this request RuntimeOptions
  @return: ListWorkspacesResponse
  """
- UtilClient.validate_model(request)
+ UtilClient.validate_model(tmp_req)
+ request = emr_serverless_spark_20230808_models.ListWorkspacesShrinkRequest()
+ OpenApiUtilClient.convert(tmp_req, request)
+ if not UtilClient.is_unset(tmp_req.tag):
+ request.tag_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tag, 'tag', 'json')
  query = {}
  if not UtilClient.is_unset(request.max_results):
  query['maxResults'] = request.max_results
@@ -1831,6 +2671,8 @@ class Client(OpenApiClient):
  query['regionId'] = request.region_id
  if not UtilClient.is_unset(request.state):
  query['state'] = request.state
+ if not UtilClient.is_unset(request.tag_shrink):
+ query['tag'] = request.tag_shrink
  req = open_api_models.OpenApiRequest(
  headers=headers,
  query=OpenApiUtilClient.query(query)
@@ -1853,19 +2695,23 @@ class Client(OpenApiClient):

  async def list_workspaces_with_options_async(
  self,
- request: emr_serverless_spark_20230808_models.ListWorkspacesRequest,
+ tmp_req: emr_serverless_spark_20230808_models.ListWorkspacesRequest,
  headers: Dict[str, str],
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.ListWorkspacesResponse:
  """
  @summary Queries a list of workspaces.

- @param request: ListWorkspacesRequest
+ @param tmp_req: ListWorkspacesRequest
  @param headers: map
  @param runtime: runtime options for this request RuntimeOptions
  @return: ListWorkspacesResponse
  """
- UtilClient.validate_model(request)
+ UtilClient.validate_model(tmp_req)
+ request = emr_serverless_spark_20230808_models.ListWorkspacesShrinkRequest()
+ OpenApiUtilClient.convert(tmp_req, request)
+ if not UtilClient.is_unset(tmp_req.tag):
+ request.tag_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tag, 'tag', 'json')
  query = {}
  if not UtilClient.is_unset(request.max_results):
  query['maxResults'] = request.max_results
@@ -1877,6 +2723,8 @@ class Client(OpenApiClient):
  query['regionId'] = request.region_id
  if not UtilClient.is_unset(request.state):
  query['state'] = request.state
+ if not UtilClient.is_unset(request.tag_shrink):
+ query['tag'] = request.tag_shrink
  req = open_api_models.OpenApiRequest(
  headers=headers,
  query=OpenApiUtilClient.query(query)
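ListWorkspaces now accepts a tag filter: the request is converted to a shrink request and the tag list is JSON-encoded into the tag query parameter. A sketch, reusing the earlier client; the tag element type is not shown here, so key/value dicts are an assumption.

ws_req = spark_models.ListWorkspacesRequest(
    max_results=20,
    # New in 1.10.1; JSON-encoded into the tag query parameter.
    tag=[{'key': 'team', 'value': 'data-platform'}],
)
workspaces = client.list_workspaces(ws_req)
print(workspaces.body)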
@@ -2093,7 +2941,7 @@ class Client(OpenApiClient):
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.StartProcessInstanceResponse:
  """
- @summary 启动工作流实例
+ @summary Manually runs a workflow.

  @param request: StartProcessInstanceRequest
  @param headers: map
@@ -2102,6 +2950,14 @@ class Client(OpenApiClient):
  """
  UtilClient.validate_model(request)
  query = {}
+ if not UtilClient.is_unset(request.action):
+ query['action'] = request.action
+ if not UtilClient.is_unset(request.comments):
+ query['comments'] = request.comments
+ if not UtilClient.is_unset(request.email):
+ query['email'] = request.email
+ if not UtilClient.is_unset(request.interval):
+ query['interval'] = request.interval
  if not UtilClient.is_unset(request.is_prod):
  query['isProd'] = request.is_prod
  if not UtilClient.is_unset(request.process_definition_code):
@@ -2144,7 +3000,7 @@ class Client(OpenApiClient):
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.StartProcessInstanceResponse:
  """
- @summary 启动工作流实例
+ @summary Manually runs a workflow.

  @param request: StartProcessInstanceRequest
  @param headers: map
@@ -2153,6 +3009,14 @@ class Client(OpenApiClient):
  """
  UtilClient.validate_model(request)
  query = {}
+ if not UtilClient.is_unset(request.action):
+ query['action'] = request.action
+ if not UtilClient.is_unset(request.comments):
+ query['comments'] = request.comments
+ if not UtilClient.is_unset(request.email):
+ query['email'] = request.email
+ if not UtilClient.is_unset(request.interval):
+ query['interval'] = request.interval
  if not UtilClient.is_unset(request.is_prod):
  query['isProd'] = request.is_prod
  if not UtilClient.is_unset(request.process_definition_code):
@@ -2193,7 +3057,7 @@ class Client(OpenApiClient):
  request: emr_serverless_spark_20230808_models.StartProcessInstanceRequest,
  ) -> emr_serverless_spark_20230808_models.StartProcessInstanceResponse:
  """
- @summary 启动工作流实例
+ @summary Manually runs a workflow.

  @param request: StartProcessInstanceRequest
  @return: StartProcessInstanceResponse
@@ -2208,7 +3072,7 @@ class Client(OpenApiClient):
  request: emr_serverless_spark_20230808_models.StartProcessInstanceRequest,
  ) -> emr_serverless_spark_20230808_models.StartProcessInstanceResponse:
  """
- @summary 启动工作流实例
+ @summary Manually runs a workflow.

  @param request: StartProcessInstanceRequest
  @return: StartProcessInstanceResponse
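StartProcessInstance gains four optional query parameters: action, comments, email, and interval. A sketch of building the request with the new fields; the accepted values for action, interval, and process_definition_code are not documented in this diff, so the ones below are placeholders.

start_req = spark_models.StartProcessInstanceRequest(
    process_definition_code='<workflow-code>',  # placeholder
    is_prod=True,
    action='START',                 # placeholder; allowed values are not shown in this diff
    comments='manual backfill run',
    email='oncall@example.com',
)
# The request is passed to client.start_process_instance(...) exactly as in 1.9.0;
# only the request model gains fields in this release.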
@@ -2578,7 +3442,7 @@ class Client(OpenApiClient):
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleResponse:
  """
- @summary 更新工作流定义和定时调度
+ @summary Updates the workflow and time-based scheduling configurations.

  @param tmp_req: UpdateProcessDefinitionWithScheduleRequest
  @param headers: map
@@ -2588,6 +3452,8 @@ class Client(OpenApiClient):
  UtilClient.validate_model(tmp_req)
  request = emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleShrinkRequest()
  OpenApiUtilClient.convert(tmp_req, request)
+ if not UtilClient.is_unset(tmp_req.global_params):
+ request.global_params_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.global_params, 'globalParams', 'json')
  if not UtilClient.is_unset(tmp_req.schedule):
  request.schedule_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.schedule, 'schedule', 'json')
  if not UtilClient.is_unset(tmp_req.tags):
@@ -2603,6 +3469,8 @@ class Client(OpenApiClient):
  query['description'] = request.description
  if not UtilClient.is_unset(request.execution_type):
  query['executionType'] = request.execution_type
+ if not UtilClient.is_unset(request.global_params_shrink):
+ query['globalParams'] = request.global_params_shrink
  if not UtilClient.is_unset(request.name):
  query['name'] = request.name
  if not UtilClient.is_unset(request.product_namespace):
@@ -2660,7 +3528,7 @@ class Client(OpenApiClient):
  runtime: util_models.RuntimeOptions,
  ) -> emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleResponse:
  """
- @summary 更新工作流定义和定时调度
+ @summary Updates the workflow and time-based scheduling configurations.

  @param tmp_req: UpdateProcessDefinitionWithScheduleRequest
  @param headers: map
@@ -2670,6 +3538,8 @@ class Client(OpenApiClient):
  UtilClient.validate_model(tmp_req)
  request = emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleShrinkRequest()
  OpenApiUtilClient.convert(tmp_req, request)
+ if not UtilClient.is_unset(tmp_req.global_params):
+ request.global_params_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.global_params, 'globalParams', 'json')
  if not UtilClient.is_unset(tmp_req.schedule):
  request.schedule_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.schedule, 'schedule', 'json')
  if not UtilClient.is_unset(tmp_req.tags):
@@ -2685,6 +3555,8 @@ class Client(OpenApiClient):
  query['description'] = request.description
  if not UtilClient.is_unset(request.execution_type):
  query['executionType'] = request.execution_type
+ if not UtilClient.is_unset(request.global_params_shrink):
+ query['globalParams'] = request.global_params_shrink
  if not UtilClient.is_unset(request.name):
  query['name'] = request.name
  if not UtilClient.is_unset(request.product_namespace):
@@ -2740,7 +3612,7 @@ class Client(OpenApiClient):
  request: emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleRequest,
  ) -> emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleResponse:
  """
- @summary 更新工作流定义和定时调度
+ @summary Updates the workflow and time-based scheduling configurations.

  @param request: UpdateProcessDefinitionWithScheduleRequest
  @return: UpdateProcessDefinitionWithScheduleResponse
@@ -2756,7 +3628,7 @@ class Client(OpenApiClient):
  request: emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleRequest,
  ) -> emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleResponse:
  """
- @summary 更新工作流定义和定时调度
+ @summary Updates the workflow and time-based scheduling configurations.

  @param request: UpdateProcessDefinitionWithScheduleRequest
  @return: UpdateProcessDefinitionWithScheduleResponse