alibabacloud-emr-serverless-spark20230808 1.8.1__py3-none-any.whl → 1.10.0__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as they appear in the supported public registries, and is provided for informational purposes only.

This version of alibabacloud-emr-serverless-spark20230808 has been flagged as a potentially problematic release.

@@ -269,137 +269,1017 @@ class Client(OpenApiClient):
         headers = {}
         return await self.cancel_job_run_with_options_async(workspace_id, job_run_id, request, headers, runtime)
 
+    def create_process_definition_with_schedule_with_options(
+        self,
+        biz_id: str,
+        tmp_req: emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleResponse:
+        """
+        @summary Creates a workflow.
+
+        @param tmp_req: CreateProcessDefinitionWithScheduleRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: CreateProcessDefinitionWithScheduleResponse
+        """
+        UtilClient.validate_model(tmp_req)
+        request = emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleShrinkRequest()
+        OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.global_params):
+            request.global_params_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.global_params, 'globalParams', 'json')
+        if not UtilClient.is_unset(tmp_req.schedule):
+            request.schedule_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.schedule, 'schedule', 'json')
+        if not UtilClient.is_unset(tmp_req.tags):
+            request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
+        if not UtilClient.is_unset(tmp_req.task_definition_json):
+            request.task_definition_json_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.task_definition_json, 'taskDefinitionJson', 'json')
+        if not UtilClient.is_unset(tmp_req.task_relation_json):
+            request.task_relation_json_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.task_relation_json, 'taskRelationJson', 'json')
+        query = {}
+        if not UtilClient.is_unset(request.alert_email_address):
+            query['alertEmailAddress'] = request.alert_email_address
+        if not UtilClient.is_unset(request.description):
+            query['description'] = request.description
+        if not UtilClient.is_unset(request.execution_type):
+            query['executionType'] = request.execution_type
+        if not UtilClient.is_unset(request.global_params_shrink):
+            query['globalParams'] = request.global_params_shrink
+        if not UtilClient.is_unset(request.name):
+            query['name'] = request.name
+        if not UtilClient.is_unset(request.product_namespace):
+            query['productNamespace'] = request.product_namespace
+        if not UtilClient.is_unset(request.publish):
+            query['publish'] = request.publish
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        if not UtilClient.is_unset(request.resource_queue):
+            query['resourceQueue'] = request.resource_queue
+        if not UtilClient.is_unset(request.retry_times):
+            query['retryTimes'] = request.retry_times
+        if not UtilClient.is_unset(request.run_as):
+            query['runAs'] = request.run_as
+        if not UtilClient.is_unset(request.schedule_shrink):
+            query['schedule'] = request.schedule_shrink
+        if not UtilClient.is_unset(request.tags_shrink):
+            query['tags'] = request.tags_shrink
+        if not UtilClient.is_unset(request.task_definition_json_shrink):
+            query['taskDefinitionJson'] = request.task_definition_json_shrink
+        if not UtilClient.is_unset(request.task_parallelism):
+            query['taskParallelism'] = request.task_parallelism
+        if not UtilClient.is_unset(request.task_relation_json_shrink):
+            query['taskRelationJson'] = request.task_relation_json_shrink
+        if not UtilClient.is_unset(request.timeout):
+            query['timeout'] = request.timeout
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='CreateProcessDefinitionWithSchedule',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/dolphinscheduler/projects/{OpenApiUtilClient.get_encode_param(biz_id)}/process-definition',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def create_process_definition_with_schedule_with_options_async(
+        self,
+        biz_id: str,
+        tmp_req: emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleResponse:
+        """
+        @summary Creates a workflow.
+
+        @param tmp_req: CreateProcessDefinitionWithScheduleRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: CreateProcessDefinitionWithScheduleResponse
+        """
+        UtilClient.validate_model(tmp_req)
+        request = emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleShrinkRequest()
+        OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.global_params):
+            request.global_params_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.global_params, 'globalParams', 'json')
+        if not UtilClient.is_unset(tmp_req.schedule):
+            request.schedule_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.schedule, 'schedule', 'json')
+        if not UtilClient.is_unset(tmp_req.tags):
+            request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
+        if not UtilClient.is_unset(tmp_req.task_definition_json):
+            request.task_definition_json_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.task_definition_json, 'taskDefinitionJson', 'json')
+        if not UtilClient.is_unset(tmp_req.task_relation_json):
+            request.task_relation_json_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.task_relation_json, 'taskRelationJson', 'json')
+        query = {}
+        if not UtilClient.is_unset(request.alert_email_address):
+            query['alertEmailAddress'] = request.alert_email_address
+        if not UtilClient.is_unset(request.description):
+            query['description'] = request.description
+        if not UtilClient.is_unset(request.execution_type):
+            query['executionType'] = request.execution_type
+        if not UtilClient.is_unset(request.global_params_shrink):
+            query['globalParams'] = request.global_params_shrink
+        if not UtilClient.is_unset(request.name):
+            query['name'] = request.name
+        if not UtilClient.is_unset(request.product_namespace):
+            query['productNamespace'] = request.product_namespace
+        if not UtilClient.is_unset(request.publish):
+            query['publish'] = request.publish
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        if not UtilClient.is_unset(request.resource_queue):
+            query['resourceQueue'] = request.resource_queue
+        if not UtilClient.is_unset(request.retry_times):
+            query['retryTimes'] = request.retry_times
+        if not UtilClient.is_unset(request.run_as):
+            query['runAs'] = request.run_as
+        if not UtilClient.is_unset(request.schedule_shrink):
+            query['schedule'] = request.schedule_shrink
+        if not UtilClient.is_unset(request.tags_shrink):
+            query['tags'] = request.tags_shrink
+        if not UtilClient.is_unset(request.task_definition_json_shrink):
+            query['taskDefinitionJson'] = request.task_definition_json_shrink
+        if not UtilClient.is_unset(request.task_parallelism):
+            query['taskParallelism'] = request.task_parallelism
+        if not UtilClient.is_unset(request.task_relation_json_shrink):
+            query['taskRelationJson'] = request.task_relation_json_shrink
+        if not UtilClient.is_unset(request.timeout):
+            query['timeout'] = request.timeout
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='CreateProcessDefinitionWithSchedule',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/dolphinscheduler/projects/{OpenApiUtilClient.get_encode_param(biz_id)}/process-definition',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def create_process_definition_with_schedule(
+        self,
+        biz_id: str,
+        request: emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleRequest,
+    ) -> emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleResponse:
+        """
+        @summary Creates a workflow.
+
+        @param request: CreateProcessDefinitionWithScheduleRequest
+        @return: CreateProcessDefinitionWithScheduleResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.create_process_definition_with_schedule_with_options(biz_id, request, headers, runtime)
+
+    async def create_process_definition_with_schedule_async(
+        self,
+        biz_id: str,
+        request: emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleRequest,
+    ) -> emr_serverless_spark_20230808_models.CreateProcessDefinitionWithScheduleResponse:
+        """
+        @summary Creates a workflow.
+
+        @param request: CreateProcessDefinitionWithScheduleRequest
+        @return: CreateProcessDefinitionWithScheduleResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.create_process_definition_with_schedule_with_options_async(biz_id, request, headers, runtime)
+
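For orientation, here is a minimal usage sketch of the new workflow API shown above. It is not taken from the package itself: the import paths assume the standard generated SDK layout, the endpoint and all field values are placeholders, and only request fields that appear in the query mapping above are set.

from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_emr_serverless_spark20230808.client import Client
from alibabacloud_emr_serverless_spark20230808 import models as emr_models

# Hypothetical credentials and endpoint; replace with your own configuration.
config = open_api_models.Config(
    access_key_id='<access-key-id>',
    access_key_secret='<access-key-secret>',
)
config.endpoint = 'emr-serverless-spark.cn-hangzhou.aliyuncs.com'  # assumed regional endpoint
client = Client(config)

# Field names mirror the query parameters in the diff above; values are illustrative.
request = emr_models.CreateProcessDefinitionWithScheduleRequest(
    name='nightly-etl',
    execution_type='PARALLEL',
    retry_times=1,
    task_parallelism=1,
)
response = client.create_process_definition_with_schedule('<project-biz-id>', request)
print(response.body)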
+    def create_session_cluster_with_options(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSessionClusterRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.CreateSessionClusterResponse:
+        """
+        @summary Creates a session.
+
+        @param request: CreateSessionClusterRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: CreateSessionClusterResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.application_configs):
+            body['applicationConfigs'] = request.application_configs
+        if not UtilClient.is_unset(request.auto_start_configuration):
+            body['autoStartConfiguration'] = request.auto_start_configuration
+        if not UtilClient.is_unset(request.auto_stop_configuration):
+            body['autoStopConfiguration'] = request.auto_stop_configuration
+        if not UtilClient.is_unset(request.display_release_version):
+            body['displayReleaseVersion'] = request.display_release_version
+        if not UtilClient.is_unset(request.env_id):
+            body['envId'] = request.env_id
+        if not UtilClient.is_unset(request.fusion):
+            body['fusion'] = request.fusion
+        if not UtilClient.is_unset(request.kind):
+            body['kind'] = request.kind
+        if not UtilClient.is_unset(request.name):
+            body['name'] = request.name
+        if not UtilClient.is_unset(request.queue_name):
+            body['queueName'] = request.queue_name
+        if not UtilClient.is_unset(request.release_version):
+            body['releaseVersion'] = request.release_version
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='CreateSessionCluster',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.CreateSessionClusterResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def create_session_cluster_with_options_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSessionClusterRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.CreateSessionClusterResponse:
+        """
+        @summary Creates a session.
+
+        @param request: CreateSessionClusterRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: CreateSessionClusterResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.application_configs):
+            body['applicationConfigs'] = request.application_configs
+        if not UtilClient.is_unset(request.auto_start_configuration):
+            body['autoStartConfiguration'] = request.auto_start_configuration
+        if not UtilClient.is_unset(request.auto_stop_configuration):
+            body['autoStopConfiguration'] = request.auto_stop_configuration
+        if not UtilClient.is_unset(request.display_release_version):
+            body['displayReleaseVersion'] = request.display_release_version
+        if not UtilClient.is_unset(request.env_id):
+            body['envId'] = request.env_id
+        if not UtilClient.is_unset(request.fusion):
+            body['fusion'] = request.fusion
+        if not UtilClient.is_unset(request.kind):
+            body['kind'] = request.kind
+        if not UtilClient.is_unset(request.name):
+            body['name'] = request.name
+        if not UtilClient.is_unset(request.queue_name):
+            body['queueName'] = request.queue_name
+        if not UtilClient.is_unset(request.release_version):
+            body['releaseVersion'] = request.release_version
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='CreateSessionCluster',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.CreateSessionClusterResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def create_session_cluster(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSessionClusterRequest,
+    ) -> emr_serverless_spark_20230808_models.CreateSessionClusterResponse:
+        """
+        @summary Creates a session.
+
+        @param request: CreateSessionClusterRequest
+        @return: CreateSessionClusterResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.create_session_cluster_with_options(workspace_id, request, headers, runtime)
+
+    async def create_session_cluster_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSessionClusterRequest,
+    ) -> emr_serverless_spark_20230808_models.CreateSessionClusterResponse:
+        """
+        @summary Creates a session.
+
+        @param request: CreateSessionClusterRequest
+        @return: CreateSessionClusterResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.create_session_cluster_with_options_async(workspace_id, request, headers, runtime)
+
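The *_async variants added above can be awaited from an asyncio event loop. A hedged sketch that reuses the client and emr_models names from the previous example; the field values are placeholders and only fields present in the request body mapping above are set.

import asyncio

async def start_session(client, workspace_id: str):
    # Hedged sketch: the field values below are illustrative, not SDK defaults.
    request = emr_models.CreateSessionClusterRequest(
        name='interactive-session',
        kind='SQL',
        queue_name='dev_queue',
        release_version='<release-version>',
    )
    response = await client.create_session_cluster_async(workspace_id, request)
    return response.body

# asyncio.run(start_session(client, '<workspace-id>'))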
  def create_sql_statement_with_options(
273
621
  self,
274
622
  workspace_id: str,
275
- request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
623
+ request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
624
+ headers: Dict[str, str],
625
+ runtime: util_models.RuntimeOptions,
626
+ ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
627
+ """
628
+ @summary Creates an SQL query task.
629
+
630
+ @param request: CreateSqlStatementRequest
631
+ @param headers: map
632
+ @param runtime: runtime options for this request RuntimeOptions
633
+ @return: CreateSqlStatementResponse
634
+ """
635
+ UtilClient.validate_model(request)
636
+ query = {}
637
+ if not UtilClient.is_unset(request.region_id):
638
+ query['regionId'] = request.region_id
639
+ body = {}
640
+ if not UtilClient.is_unset(request.code_content):
641
+ body['codeContent'] = request.code_content
642
+ if not UtilClient.is_unset(request.default_catalog):
643
+ body['defaultCatalog'] = request.default_catalog
644
+ if not UtilClient.is_unset(request.default_database):
645
+ body['defaultDatabase'] = request.default_database
646
+ if not UtilClient.is_unset(request.limit):
647
+ body['limit'] = request.limit
648
+ if not UtilClient.is_unset(request.sql_compute_id):
649
+ body['sqlComputeId'] = request.sql_compute_id
650
+ req = open_api_models.OpenApiRequest(
651
+ headers=headers,
652
+ query=OpenApiUtilClient.query(query),
653
+ body=OpenApiUtilClient.parse_to_map(body)
654
+ )
655
+ params = open_api_models.Params(
656
+ action='CreateSqlStatement',
657
+ version='2023-08-08',
658
+ protocol='HTTPS',
659
+ pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
660
+ method='PUT',
661
+ auth_type='AK',
662
+ style='ROA',
663
+ req_body_type='json',
664
+ body_type='json'
665
+ )
666
+ return TeaCore.from_map(
667
+ emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
668
+ self.call_api(params, req, runtime)
669
+ )
670
+
671
+ async def create_sql_statement_with_options_async(
672
+ self,
673
+ workspace_id: str,
674
+ request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
675
+ headers: Dict[str, str],
676
+ runtime: util_models.RuntimeOptions,
677
+ ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
678
+ """
679
+ @summary Creates an SQL query task.
680
+
681
+ @param request: CreateSqlStatementRequest
682
+ @param headers: map
683
+ @param runtime: runtime options for this request RuntimeOptions
684
+ @return: CreateSqlStatementResponse
685
+ """
686
+ UtilClient.validate_model(request)
687
+ query = {}
688
+ if not UtilClient.is_unset(request.region_id):
689
+ query['regionId'] = request.region_id
690
+ body = {}
691
+ if not UtilClient.is_unset(request.code_content):
692
+ body['codeContent'] = request.code_content
693
+ if not UtilClient.is_unset(request.default_catalog):
694
+ body['defaultCatalog'] = request.default_catalog
695
+ if not UtilClient.is_unset(request.default_database):
696
+ body['defaultDatabase'] = request.default_database
697
+ if not UtilClient.is_unset(request.limit):
698
+ body['limit'] = request.limit
699
+ if not UtilClient.is_unset(request.sql_compute_id):
700
+ body['sqlComputeId'] = request.sql_compute_id
701
+ req = open_api_models.OpenApiRequest(
702
+ headers=headers,
703
+ query=OpenApiUtilClient.query(query),
704
+ body=OpenApiUtilClient.parse_to_map(body)
705
+ )
706
+ params = open_api_models.Params(
707
+ action='CreateSqlStatement',
708
+ version='2023-08-08',
709
+ protocol='HTTPS',
710
+ pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
711
+ method='PUT',
712
+ auth_type='AK',
713
+ style='ROA',
714
+ req_body_type='json',
715
+ body_type='json'
716
+ )
717
+ return TeaCore.from_map(
718
+ emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
719
+ await self.call_api_async(params, req, runtime)
720
+ )
721
+
722
+ def create_sql_statement(
723
+ self,
724
+ workspace_id: str,
725
+ request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
726
+ ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
727
+ """
728
+ @summary Creates an SQL query task.
729
+
730
+ @param request: CreateSqlStatementRequest
731
+ @return: CreateSqlStatementResponse
732
+ """
733
+ runtime = util_models.RuntimeOptions()
734
+ headers = {}
735
+ return self.create_sql_statement_with_options(workspace_id, request, headers, runtime)
736
+
737
+ async def create_sql_statement_async(
738
+ self,
739
+ workspace_id: str,
740
+ request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
741
+ ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
742
+ """
743
+ @summary Creates an SQL query task.
744
+
745
+ @param request: CreateSqlStatementRequest
746
+ @return: CreateSqlStatementResponse
747
+ """
748
+ runtime = util_models.RuntimeOptions()
749
+ headers = {}
750
+ return await self.create_sql_statement_with_options_async(workspace_id, request, headers, runtime)
751
+
752
+ def create_workspace_with_options(
753
+ self,
754
+ request: emr_serverless_spark_20230808_models.CreateWorkspaceRequest,
755
+ headers: Dict[str, str],
756
+ runtime: util_models.RuntimeOptions,
757
+ ) -> emr_serverless_spark_20230808_models.CreateWorkspaceResponse:
758
+ """
759
+ @summary Creates a workspace.
760
+
761
+ @param request: CreateWorkspaceRequest
762
+ @param headers: map
763
+ @param runtime: runtime options for this request RuntimeOptions
764
+ @return: CreateWorkspaceResponse
765
+ """
766
+ UtilClient.validate_model(request)
767
+ query = {}
768
+ if not UtilClient.is_unset(request.region_id):
769
+ query['regionId'] = request.region_id
770
+ body = {}
771
+ if not UtilClient.is_unset(request.auto_renew):
772
+ body['autoRenew'] = request.auto_renew
773
+ if not UtilClient.is_unset(request.auto_renew_period):
774
+ body['autoRenewPeriod'] = request.auto_renew_period
775
+ if not UtilClient.is_unset(request.auto_renew_period_unit):
776
+ body['autoRenewPeriodUnit'] = request.auto_renew_period_unit
777
+ if not UtilClient.is_unset(request.auto_start_session_cluster):
778
+ body['autoStartSessionCluster'] = request.auto_start_session_cluster
779
+ if not UtilClient.is_unset(request.client_token):
780
+ body['clientToken'] = request.client_token
781
+ if not UtilClient.is_unset(request.dlf_catalog_id):
782
+ body['dlfCatalogId'] = request.dlf_catalog_id
783
+ if not UtilClient.is_unset(request.dlf_type):
784
+ body['dlfType'] = request.dlf_type
785
+ if not UtilClient.is_unset(request.duration):
786
+ body['duration'] = request.duration
787
+ if not UtilClient.is_unset(request.oss_bucket):
788
+ body['ossBucket'] = request.oss_bucket
789
+ if not UtilClient.is_unset(request.payment_duration_unit):
790
+ body['paymentDurationUnit'] = request.payment_duration_unit
791
+ if not UtilClient.is_unset(request.payment_type):
792
+ body['paymentType'] = request.payment_type
793
+ if not UtilClient.is_unset(request.ram_role_name):
794
+ body['ramRoleName'] = request.ram_role_name
795
+ if not UtilClient.is_unset(request.release_type):
796
+ body['releaseType'] = request.release_type
797
+ if not UtilClient.is_unset(request.resource_spec):
798
+ body['resourceSpec'] = request.resource_spec
799
+ if not UtilClient.is_unset(request.tag):
800
+ body['tag'] = request.tag
801
+ if not UtilClient.is_unset(request.workspace_name):
802
+ body['workspaceName'] = request.workspace_name
803
+ req = open_api_models.OpenApiRequest(
804
+ headers=headers,
805
+ query=OpenApiUtilClient.query(query),
806
+ body=OpenApiUtilClient.parse_to_map(body)
807
+ )
808
+ params = open_api_models.Params(
809
+ action='CreateWorkspace',
810
+ version='2023-08-08',
811
+ protocol='HTTPS',
812
+ pathname=f'/api/v1/workspaces',
813
+ method='POST',
814
+ auth_type='AK',
815
+ style='ROA',
816
+ req_body_type='json',
817
+ body_type='json'
818
+ )
819
+ return TeaCore.from_map(
820
+ emr_serverless_spark_20230808_models.CreateWorkspaceResponse(),
821
+ self.call_api(params, req, runtime)
822
+ )
823
+
824
+ async def create_workspace_with_options_async(
825
+ self,
826
+ request: emr_serverless_spark_20230808_models.CreateWorkspaceRequest,
827
+ headers: Dict[str, str],
828
+ runtime: util_models.RuntimeOptions,
829
+ ) -> emr_serverless_spark_20230808_models.CreateWorkspaceResponse:
830
+ """
831
+ @summary Creates a workspace.
832
+
833
+ @param request: CreateWorkspaceRequest
834
+ @param headers: map
835
+ @param runtime: runtime options for this request RuntimeOptions
836
+ @return: CreateWorkspaceResponse
837
+ """
838
+ UtilClient.validate_model(request)
839
+ query = {}
840
+ if not UtilClient.is_unset(request.region_id):
841
+ query['regionId'] = request.region_id
842
+ body = {}
843
+ if not UtilClient.is_unset(request.auto_renew):
844
+ body['autoRenew'] = request.auto_renew
845
+ if not UtilClient.is_unset(request.auto_renew_period):
846
+ body['autoRenewPeriod'] = request.auto_renew_period
847
+ if not UtilClient.is_unset(request.auto_renew_period_unit):
848
+ body['autoRenewPeriodUnit'] = request.auto_renew_period_unit
849
+ if not UtilClient.is_unset(request.auto_start_session_cluster):
850
+ body['autoStartSessionCluster'] = request.auto_start_session_cluster
851
+ if not UtilClient.is_unset(request.client_token):
852
+ body['clientToken'] = request.client_token
853
+ if not UtilClient.is_unset(request.dlf_catalog_id):
854
+ body['dlfCatalogId'] = request.dlf_catalog_id
855
+ if not UtilClient.is_unset(request.dlf_type):
856
+ body['dlfType'] = request.dlf_type
857
+ if not UtilClient.is_unset(request.duration):
858
+ body['duration'] = request.duration
859
+ if not UtilClient.is_unset(request.oss_bucket):
860
+ body['ossBucket'] = request.oss_bucket
861
+ if not UtilClient.is_unset(request.payment_duration_unit):
862
+ body['paymentDurationUnit'] = request.payment_duration_unit
863
+ if not UtilClient.is_unset(request.payment_type):
864
+ body['paymentType'] = request.payment_type
865
+ if not UtilClient.is_unset(request.ram_role_name):
866
+ body['ramRoleName'] = request.ram_role_name
867
+ if not UtilClient.is_unset(request.release_type):
868
+ body['releaseType'] = request.release_type
869
+ if not UtilClient.is_unset(request.resource_spec):
870
+ body['resourceSpec'] = request.resource_spec
871
+ if not UtilClient.is_unset(request.tag):
872
+ body['tag'] = request.tag
873
+ if not UtilClient.is_unset(request.workspace_name):
874
+ body['workspaceName'] = request.workspace_name
875
+ req = open_api_models.OpenApiRequest(
876
+ headers=headers,
877
+ query=OpenApiUtilClient.query(query),
878
+ body=OpenApiUtilClient.parse_to_map(body)
879
+ )
880
+ params = open_api_models.Params(
881
+ action='CreateWorkspace',
882
+ version='2023-08-08',
883
+ protocol='HTTPS',
884
+ pathname=f'/api/v1/workspaces',
885
+ method='POST',
886
+ auth_type='AK',
887
+ style='ROA',
888
+ req_body_type='json',
889
+ body_type='json'
890
+ )
891
+ return TeaCore.from_map(
892
+ emr_serverless_spark_20230808_models.CreateWorkspaceResponse(),
893
+ await self.call_api_async(params, req, runtime)
894
+ )
895
+
896
+ def create_workspace(
897
+ self,
898
+ request: emr_serverless_spark_20230808_models.CreateWorkspaceRequest,
899
+ ) -> emr_serverless_spark_20230808_models.CreateWorkspaceResponse:
900
+ """
901
+ @summary Creates a workspace.
902
+
903
+ @param request: CreateWorkspaceRequest
904
+ @return: CreateWorkspaceResponse
905
+ """
906
+ runtime = util_models.RuntimeOptions()
907
+ headers = {}
908
+ return self.create_workspace_with_options(request, headers, runtime)
909
+
910
+ async def create_workspace_async(
911
+ self,
912
+ request: emr_serverless_spark_20230808_models.CreateWorkspaceRequest,
913
+ ) -> emr_serverless_spark_20230808_models.CreateWorkspaceResponse:
914
+ """
915
+ @summary Creates a workspace.
916
+
917
+ @param request: CreateWorkspaceRequest
918
+ @return: CreateWorkspaceResponse
919
+ """
920
+ runtime = util_models.RuntimeOptions()
921
+ headers = {}
922
+ return await self.create_workspace_with_options_async(request, headers, runtime)
923
+
924
+ def edit_workspace_queue_with_options(
925
+ self,
926
+ request: emr_serverless_spark_20230808_models.EditWorkspaceQueueRequest,
927
+ headers: Dict[str, str],
928
+ runtime: util_models.RuntimeOptions,
929
+ ) -> emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse:
930
+ """
931
+ @summary Modifies the queue of a workspace.
932
+
933
+ @param request: EditWorkspaceQueueRequest
934
+ @param headers: map
935
+ @param runtime: runtime options for this request RuntimeOptions
936
+ @return: EditWorkspaceQueueResponse
937
+ """
938
+ UtilClient.validate_model(request)
939
+ query = {}
940
+ if not UtilClient.is_unset(request.region_id):
941
+ query['regionId'] = request.region_id
942
+ body = {}
943
+ if not UtilClient.is_unset(request.environments):
944
+ body['environments'] = request.environments
945
+ if not UtilClient.is_unset(request.resource_spec):
946
+ body['resourceSpec'] = request.resource_spec
947
+ if not UtilClient.is_unset(request.workspace_id):
948
+ body['workspaceId'] = request.workspace_id
949
+ if not UtilClient.is_unset(request.workspace_queue_name):
950
+ body['workspaceQueueName'] = request.workspace_queue_name
951
+ req = open_api_models.OpenApiRequest(
952
+ headers=headers,
953
+ query=OpenApiUtilClient.query(query),
954
+ body=OpenApiUtilClient.parse_to_map(body)
955
+ )
956
+ params = open_api_models.Params(
957
+ action='EditWorkspaceQueue',
958
+ version='2023-08-08',
959
+ protocol='HTTPS',
960
+ pathname=f'/api/v1/workspaces/queues/action/edit',
961
+ method='POST',
962
+ auth_type='AK',
963
+ style='ROA',
964
+ req_body_type='json',
965
+ body_type='json'
966
+ )
967
+ return TeaCore.from_map(
968
+ emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse(),
969
+ self.call_api(params, req, runtime)
970
+ )
971
+
972
+ async def edit_workspace_queue_with_options_async(
973
+ self,
974
+ request: emr_serverless_spark_20230808_models.EditWorkspaceQueueRequest,
975
+ headers: Dict[str, str],
976
+ runtime: util_models.RuntimeOptions,
977
+ ) -> emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse:
978
+ """
979
+ @summary Modifies the queue of a workspace.
980
+
981
+ @param request: EditWorkspaceQueueRequest
982
+ @param headers: map
983
+ @param runtime: runtime options for this request RuntimeOptions
984
+ @return: EditWorkspaceQueueResponse
985
+ """
986
+ UtilClient.validate_model(request)
987
+ query = {}
988
+ if not UtilClient.is_unset(request.region_id):
989
+ query['regionId'] = request.region_id
990
+ body = {}
991
+ if not UtilClient.is_unset(request.environments):
992
+ body['environments'] = request.environments
993
+ if not UtilClient.is_unset(request.resource_spec):
994
+ body['resourceSpec'] = request.resource_spec
995
+ if not UtilClient.is_unset(request.workspace_id):
996
+ body['workspaceId'] = request.workspace_id
997
+ if not UtilClient.is_unset(request.workspace_queue_name):
998
+ body['workspaceQueueName'] = request.workspace_queue_name
999
+ req = open_api_models.OpenApiRequest(
1000
+ headers=headers,
1001
+ query=OpenApiUtilClient.query(query),
1002
+ body=OpenApiUtilClient.parse_to_map(body)
1003
+ )
1004
+ params = open_api_models.Params(
1005
+ action='EditWorkspaceQueue',
1006
+ version='2023-08-08',
1007
+ protocol='HTTPS',
1008
+ pathname=f'/api/v1/workspaces/queues/action/edit',
1009
+ method='POST',
1010
+ auth_type='AK',
1011
+ style='ROA',
1012
+ req_body_type='json',
1013
+ body_type='json'
1014
+ )
1015
+ return TeaCore.from_map(
1016
+ emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse(),
1017
+ await self.call_api_async(params, req, runtime)
1018
+ )
1019
+
1020
+ def edit_workspace_queue(
1021
+ self,
1022
+ request: emr_serverless_spark_20230808_models.EditWorkspaceQueueRequest,
1023
+ ) -> emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse:
1024
+ """
1025
+ @summary Modifies the queue of a workspace.
1026
+
1027
+ @param request: EditWorkspaceQueueRequest
1028
+ @return: EditWorkspaceQueueResponse
1029
+ """
1030
+ runtime = util_models.RuntimeOptions()
1031
+ headers = {}
1032
+ return self.edit_workspace_queue_with_options(request, headers, runtime)
1033
+
1034
+ async def edit_workspace_queue_async(
1035
+ self,
1036
+ request: emr_serverless_spark_20230808_models.EditWorkspaceQueueRequest,
1037
+ ) -> emr_serverless_spark_20230808_models.EditWorkspaceQueueResponse:
1038
+ """
1039
+ @summary Modifies the queue of a workspace.
1040
+
1041
+ @param request: EditWorkspaceQueueRequest
1042
+ @return: EditWorkspaceQueueResponse
1043
+ """
1044
+ runtime = util_models.RuntimeOptions()
1045
+ headers = {}
1046
+ return await self.edit_workspace_queue_with_options_async(request, headers, runtime)
1047
+
1048
+ def get_cu_hours_with_options(
1049
+ self,
1050
+ workspace_id: str,
1051
+ queue: str,
1052
+ request: emr_serverless_spark_20230808_models.GetCuHoursRequest,
1053
+ headers: Dict[str, str],
1054
+ runtime: util_models.RuntimeOptions,
1055
+ ) -> emr_serverless_spark_20230808_models.GetCuHoursResponse:
1056
+ """
1057
+ @summary Queries the number of CU-hours consumed by a queue during a specified cycle.
1058
+
1059
+ @param request: GetCuHoursRequest
1060
+ @param headers: map
1061
+ @param runtime: runtime options for this request RuntimeOptions
1062
+ @return: GetCuHoursResponse
1063
+ """
1064
+ UtilClient.validate_model(request)
1065
+ query = {}
1066
+ if not UtilClient.is_unset(request.end_time):
1067
+ query['endTime'] = request.end_time
1068
+ if not UtilClient.is_unset(request.start_time):
1069
+ query['startTime'] = request.start_time
1070
+ req = open_api_models.OpenApiRequest(
1071
+ headers=headers,
1072
+ query=OpenApiUtilClient.query(query)
1073
+ )
1074
+ params = open_api_models.Params(
1075
+ action='GetCuHours',
1076
+ version='2023-08-08',
1077
+ protocol='HTTPS',
1078
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/metric/cuHours/{OpenApiUtilClient.get_encode_param(queue)}',
1079
+ method='GET',
1080
+ auth_type='AK',
1081
+ style='ROA',
1082
+ req_body_type='json',
1083
+ body_type='json'
1084
+ )
1085
+ return TeaCore.from_map(
1086
+ emr_serverless_spark_20230808_models.GetCuHoursResponse(),
1087
+ self.call_api(params, req, runtime)
1088
+ )
1089
+
1090
+ async def get_cu_hours_with_options_async(
1091
+ self,
1092
+ workspace_id: str,
1093
+ queue: str,
1094
+ request: emr_serverless_spark_20230808_models.GetCuHoursRequest,
1095
+ headers: Dict[str, str],
1096
+ runtime: util_models.RuntimeOptions,
1097
+ ) -> emr_serverless_spark_20230808_models.GetCuHoursResponse:
1098
+ """
1099
+ @summary Queries the number of CU-hours consumed by a queue during a specified cycle.
1100
+
1101
+ @param request: GetCuHoursRequest
1102
+ @param headers: map
1103
+ @param runtime: runtime options for this request RuntimeOptions
1104
+ @return: GetCuHoursResponse
1105
+ """
1106
+ UtilClient.validate_model(request)
1107
+ query = {}
1108
+ if not UtilClient.is_unset(request.end_time):
1109
+ query['endTime'] = request.end_time
1110
+ if not UtilClient.is_unset(request.start_time):
1111
+ query['startTime'] = request.start_time
1112
+ req = open_api_models.OpenApiRequest(
1113
+ headers=headers,
1114
+ query=OpenApiUtilClient.query(query)
1115
+ )
1116
+ params = open_api_models.Params(
1117
+ action='GetCuHours',
1118
+ version='2023-08-08',
1119
+ protocol='HTTPS',
1120
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/metric/cuHours/{OpenApiUtilClient.get_encode_param(queue)}',
1121
+ method='GET',
1122
+ auth_type='AK',
1123
+ style='ROA',
1124
+ req_body_type='json',
1125
+ body_type='json'
1126
+ )
1127
+ return TeaCore.from_map(
1128
+ emr_serverless_spark_20230808_models.GetCuHoursResponse(),
1129
+ await self.call_api_async(params, req, runtime)
1130
+ )
1131
+
1132
+ def get_cu_hours(
1133
+ self,
1134
+ workspace_id: str,
1135
+ queue: str,
1136
+ request: emr_serverless_spark_20230808_models.GetCuHoursRequest,
1137
+ ) -> emr_serverless_spark_20230808_models.GetCuHoursResponse:
1138
+ """
1139
+ @summary Queries the number of CU-hours consumed by a queue during a specified cycle.
1140
+
1141
+ @param request: GetCuHoursRequest
1142
+ @return: GetCuHoursResponse
1143
+ """
1144
+ runtime = util_models.RuntimeOptions()
1145
+ headers = {}
1146
+ return self.get_cu_hours_with_options(workspace_id, queue, request, headers, runtime)
1147
+
1148
+ async def get_cu_hours_async(
1149
+ self,
1150
+ workspace_id: str,
1151
+ queue: str,
1152
+ request: emr_serverless_spark_20230808_models.GetCuHoursRequest,
1153
+ ) -> emr_serverless_spark_20230808_models.GetCuHoursResponse:
1154
+ """
1155
+ @summary Queries the number of CU-hours consumed by a queue during a specified cycle.
1156
+
1157
+ @param request: GetCuHoursRequest
1158
+ @return: GetCuHoursResponse
1159
+ """
1160
+ runtime = util_models.RuntimeOptions()
1161
+ headers = {}
1162
+ return await self.get_cu_hours_with_options_async(workspace_id, queue, request, headers, runtime)
1163
+
1164
+ def get_doctor_application_with_options(
1165
+ self,
1166
+ workspace_id: str,
1167
+ run_id: str,
1168
+ request: emr_serverless_spark_20230808_models.GetDoctorApplicationRequest,
276
1169
  headers: Dict[str, str],
277
1170
  runtime: util_models.RuntimeOptions,
278
- ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
1171
+ ) -> emr_serverless_spark_20230808_models.GetDoctorApplicationResponse:
279
1172
  """
280
- @summary Creates an SQL query task.
1173
+ @summary Obtains job analysis information on E-MapReduce (EMR) Doctor.
281
1174
 
282
- @param request: CreateSqlStatementRequest
1175
+ @param request: GetDoctorApplicationRequest
283
1176
  @param headers: map
284
1177
  @param runtime: runtime options for this request RuntimeOptions
285
- @return: CreateSqlStatementResponse
1178
+ @return: GetDoctorApplicationResponse
286
1179
  """
287
1180
  UtilClient.validate_model(request)
288
1181
  query = {}
1182
+ if not UtilClient.is_unset(request.locale):
1183
+ query['locale'] = request.locale
1184
+ if not UtilClient.is_unset(request.query_time):
1185
+ query['queryTime'] = request.query_time
289
1186
  if not UtilClient.is_unset(request.region_id):
290
1187
  query['regionId'] = request.region_id
291
- body = {}
292
- if not UtilClient.is_unset(request.code_content):
293
- body['codeContent'] = request.code_content
294
- if not UtilClient.is_unset(request.default_catalog):
295
- body['defaultCatalog'] = request.default_catalog
296
- if not UtilClient.is_unset(request.default_database):
297
- body['defaultDatabase'] = request.default_database
298
- if not UtilClient.is_unset(request.limit):
299
- body['limit'] = request.limit
300
- if not UtilClient.is_unset(request.sql_compute_id):
301
- body['sqlComputeId'] = request.sql_compute_id
302
1188
  req = open_api_models.OpenApiRequest(
303
1189
  headers=headers,
304
- query=OpenApiUtilClient.query(query),
305
- body=OpenApiUtilClient.parse_to_map(body)
1190
+ query=OpenApiUtilClient.query(query)
306
1191
  )
307
1192
  params = open_api_models.Params(
308
- action='CreateSqlStatement',
1193
+ action='GetDoctorApplication',
309
1194
  version='2023-08-08',
310
1195
  protocol='HTTPS',
311
- pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
312
- method='PUT',
1196
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/runs/{OpenApiUtilClient.get_encode_param(run_id)}/action/getDoctorApplication',
1197
+ method='GET',
313
1198
  auth_type='AK',
314
1199
  style='ROA',
315
1200
  req_body_type='json',
316
1201
  body_type='json'
317
1202
  )
318
1203
  return TeaCore.from_map(
319
- emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
1204
+ emr_serverless_spark_20230808_models.GetDoctorApplicationResponse(),
320
1205
  self.call_api(params, req, runtime)
321
1206
  )
322
1207
 
323
- async def create_sql_statement_with_options_async(
1208
+ async def get_doctor_application_with_options_async(
324
1209
  self,
325
1210
  workspace_id: str,
326
- request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
1211
+ run_id: str,
1212
+ request: emr_serverless_spark_20230808_models.GetDoctorApplicationRequest,
327
1213
  headers: Dict[str, str],
328
1214
  runtime: util_models.RuntimeOptions,
329
- ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
1215
+ ) -> emr_serverless_spark_20230808_models.GetDoctorApplicationResponse:
330
1216
  """
331
- @summary Creates an SQL query task.
1217
+ @summary Obtains job analysis information on E-MapReduce (EMR) Doctor.
332
1218
 
333
- @param request: CreateSqlStatementRequest
1219
+ @param request: GetDoctorApplicationRequest
334
1220
  @param headers: map
335
1221
  @param runtime: runtime options for this request RuntimeOptions
336
- @return: CreateSqlStatementResponse
1222
+ @return: GetDoctorApplicationResponse
337
1223
  """
338
1224
  UtilClient.validate_model(request)
339
1225
  query = {}
1226
+ if not UtilClient.is_unset(request.locale):
1227
+ query['locale'] = request.locale
1228
+ if not UtilClient.is_unset(request.query_time):
1229
+ query['queryTime'] = request.query_time
340
1230
  if not UtilClient.is_unset(request.region_id):
341
1231
  query['regionId'] = request.region_id
342
- body = {}
343
- if not UtilClient.is_unset(request.code_content):
344
- body['codeContent'] = request.code_content
345
- if not UtilClient.is_unset(request.default_catalog):
346
- body['defaultCatalog'] = request.default_catalog
347
- if not UtilClient.is_unset(request.default_database):
348
- body['defaultDatabase'] = request.default_database
349
- if not UtilClient.is_unset(request.limit):
350
- body['limit'] = request.limit
351
- if not UtilClient.is_unset(request.sql_compute_id):
352
- body['sqlComputeId'] = request.sql_compute_id
353
1232
  req = open_api_models.OpenApiRequest(
354
1233
  headers=headers,
355
- query=OpenApiUtilClient.query(query),
356
- body=OpenApiUtilClient.parse_to_map(body)
1234
+ query=OpenApiUtilClient.query(query)
357
1235
  )
358
1236
  params = open_api_models.Params(
359
- action='CreateSqlStatement',
1237
+ action='GetDoctorApplication',
360
1238
  version='2023-08-08',
361
1239
  protocol='HTTPS',
362
- pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
363
- method='PUT',
1240
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/runs/{OpenApiUtilClient.get_encode_param(run_id)}/action/getDoctorApplication',
1241
+ method='GET',
364
1242
  auth_type='AK',
365
1243
  style='ROA',
366
1244
  req_body_type='json',
367
1245
  body_type='json'
368
1246
  )
369
1247
  return TeaCore.from_map(
370
- emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
1248
+ emr_serverless_spark_20230808_models.GetDoctorApplicationResponse(),
371
1249
  await self.call_api_async(params, req, runtime)
372
1250
  )
373
1251
 
374
- def create_sql_statement(
1252
+ def get_doctor_application(
375
1253
  self,
376
1254
  workspace_id: str,
377
- request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
378
- ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
1255
+ run_id: str,
1256
+ request: emr_serverless_spark_20230808_models.GetDoctorApplicationRequest,
1257
+ ) -> emr_serverless_spark_20230808_models.GetDoctorApplicationResponse:
379
1258
  """
380
- @summary Creates an SQL query task.
1259
+ @summary Obtains job analysis information on E-MapReduce (EMR) Doctor.
381
1260
 
382
- @param request: CreateSqlStatementRequest
383
- @return: CreateSqlStatementResponse
1261
+ @param request: GetDoctorApplicationRequest
1262
+ @return: GetDoctorApplicationResponse
384
1263
  """
385
1264
  runtime = util_models.RuntimeOptions()
386
1265
  headers = {}
387
- return self.create_sql_statement_with_options(workspace_id, request, headers, runtime)
1266
+ return self.get_doctor_application_with_options(workspace_id, run_id, request, headers, runtime)
388
1267
 
389
- async def create_sql_statement_async(
1268
+ async def get_doctor_application_async(
390
1269
  self,
391
1270
  workspace_id: str,
392
- request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
393
- ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
1271
+ run_id: str,
1272
+ request: emr_serverless_spark_20230808_models.GetDoctorApplicationRequest,
1273
+ ) -> emr_serverless_spark_20230808_models.GetDoctorApplicationResponse:
394
1274
  """
395
- @summary Creates an SQL query task.
1275
+ @summary Obtains job analysis information on E-MapReduce (EMR) Doctor.
396
1276
 
397
- @param request: CreateSqlStatementRequest
398
- @return: CreateSqlStatementResponse
1277
+ @param request: GetDoctorApplicationRequest
1278
+ @return: GetDoctorApplicationResponse
399
1279
  """
400
1280
  runtime = util_models.RuntimeOptions()
401
1281
  headers = {}
402
- return await self.create_sql_statement_with_options_async(workspace_id, request, headers, runtime)
1282
+ return await self.get_doctor_application_with_options_async(workspace_id, run_id, request, headers, runtime)
403
1283
 
404
1284
  def get_job_run_with_options(
405
1285
  self,
@@ -522,7 +1402,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GetSessionClusterResponse:
         """
-        @summary 查询SessionCluster集群
+        @summary Queries the information about a session.
 
         @param request: GetSessionClusterRequest
         @param headers: map
@@ -562,7 +1442,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GetSessionClusterResponse:
         """
-        @summary 查询SessionCluster集群
+        @summary Queries the information about a session.
 
         @param request: GetSessionClusterRequest
         @param headers: map
@@ -600,7 +1480,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GetSessionClusterRequest,
     ) -> emr_serverless_spark_20230808_models.GetSessionClusterResponse:
         """
-        @summary 查询SessionCluster集群
+        @summary Queries the information about a session.
 
         @param request: GetSessionClusterRequest
         @return: GetSessionClusterResponse
@@ -616,7 +1496,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GetSessionClusterRequest,
     ) -> emr_serverless_spark_20230808_models.GetSessionClusterResponse:
         """
-        @summary 查询SessionCluster集群
+        @summary Queries the information about a session.
 
         @param request: GetSessionClusterRequest
         @return: GetSessionClusterResponse
@@ -745,7 +1625,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GetTemplateResponse:
         """
-        @summary 获取任务模板
+        @summary Queries task templates.
 
         @param request: GetTemplateRequest
         @param headers: map
@@ -786,7 +1666,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GetTemplateResponse:
         """
-        @summary 获取任务模板
+        @summary Queries task templates.
 
         @param request: GetTemplateRequest
         @param headers: map
@@ -825,7 +1705,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GetTemplateRequest,
     ) -> emr_serverless_spark_20230808_models.GetTemplateResponse:
         """
-        @summary 获取任务模板
+        @summary Queries task templates.
 
         @param request: GetTemplateRequest
         @return: GetTemplateResponse
@@ -840,7 +1720,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GetTemplateRequest,
     ) -> emr_serverless_spark_20230808_models.GetTemplateResponse:
         """
-        @summary 获取任务模板
+        @summary Queries task templates.
 
         @param request: GetTemplateRequest
         @return: GetTemplateResponse
@@ -958,73 +1838,230 @@ class Client(OpenApiClient):
958
1838
  """
959
1839
  @summary Assigns a specified role to users.
960
1840
 
961
- @param request: GrantRoleToUsersRequest
962
- @return: GrantRoleToUsersResponse
1841
+ @param request: GrantRoleToUsersRequest
1842
+ @return: GrantRoleToUsersResponse
1843
+ """
1844
+ runtime = util_models.RuntimeOptions()
1845
+ headers = {}
1846
+ return await self.grant_role_to_users_with_options_async(request, headers, runtime)
1847
+
1848
+ def list_job_runs_with_options(
1849
+ self,
1850
+ workspace_id: str,
1851
+ tmp_req: emr_serverless_spark_20230808_models.ListJobRunsRequest,
1852
+ headers: Dict[str, str],
1853
+ runtime: util_models.RuntimeOptions,
1854
+ ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
1855
+ """
1856
+ @summary Queries a list of Spark jobs.
1857
+
1858
+ @param tmp_req: ListJobRunsRequest
1859
+ @param headers: map
1860
+ @param runtime: runtime options for this request RuntimeOptions
1861
+ @return: ListJobRunsResponse
1862
+ """
1863
+ UtilClient.validate_model(tmp_req)
1864
+ request = emr_serverless_spark_20230808_models.ListJobRunsShrinkRequest()
1865
+ OpenApiUtilClient.convert(tmp_req, request)
1866
+ if not UtilClient.is_unset(tmp_req.end_time):
1867
+ request.end_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.end_time, 'endTime', 'json')
1868
+ if not UtilClient.is_unset(tmp_req.start_time):
1869
+ request.start_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.start_time, 'startTime', 'json')
1870
+ if not UtilClient.is_unset(tmp_req.states):
1871
+ request.states_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.states, 'states', 'json')
1872
+ if not UtilClient.is_unset(tmp_req.tags):
1873
+ request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
1874
+ query = {}
1875
+ if not UtilClient.is_unset(request.creator):
1876
+ query['creator'] = request.creator
1877
+ if not UtilClient.is_unset(request.end_time_shrink):
1878
+ query['endTime'] = request.end_time_shrink
1879
+ if not UtilClient.is_unset(request.job_run_deployment_id):
1880
+ query['jobRunDeploymentId'] = request.job_run_deployment_id
1881
+ if not UtilClient.is_unset(request.job_run_id):
1882
+ query['jobRunId'] = request.job_run_id
1883
+ if not UtilClient.is_unset(request.max_results):
1884
+ query['maxResults'] = request.max_results
1885
+ if not UtilClient.is_unset(request.min_duration):
1886
+ query['minDuration'] = request.min_duration
1887
+ if not UtilClient.is_unset(request.name):
1888
+ query['name'] = request.name
1889
+ if not UtilClient.is_unset(request.next_token):
1890
+ query['nextToken'] = request.next_token
1891
+ if not UtilClient.is_unset(request.region_id):
1892
+ query['regionId'] = request.region_id
1893
+ if not UtilClient.is_unset(request.resource_queue_id):
1894
+ query['resourceQueueId'] = request.resource_queue_id
1895
+ if not UtilClient.is_unset(request.start_time_shrink):
1896
+ query['startTime'] = request.start_time_shrink
1897
+ if not UtilClient.is_unset(request.states_shrink):
1898
+ query['states'] = request.states_shrink
1899
+ if not UtilClient.is_unset(request.tags_shrink):
1900
+ query['tags'] = request.tags_shrink
1901
+ req = open_api_models.OpenApiRequest(
1902
+ headers=headers,
1903
+ query=OpenApiUtilClient.query(query)
1904
+ )
1905
+ params = open_api_models.Params(
1906
+ action='ListJobRuns',
1907
+ version='2023-08-08',
1908
+ protocol='HTTPS',
1909
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/jobRuns',
1910
+ method='GET',
1911
+ auth_type='AK',
1912
+ style='ROA',
1913
+ req_body_type='json',
1914
+ body_type='json'
1915
+ )
1916
+ return TeaCore.from_map(
1917
+ emr_serverless_spark_20230808_models.ListJobRunsResponse(),
1918
+ self.call_api(params, req, runtime)
1919
+ )
1920
+
1921
+ async def list_job_runs_with_options_async(
1922
+ self,
1923
+ workspace_id: str,
1924
+ tmp_req: emr_serverless_spark_20230808_models.ListJobRunsRequest,
1925
+ headers: Dict[str, str],
1926
+ runtime: util_models.RuntimeOptions,
1927
+ ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
1928
+ """
1929
+ @summary Queries a list of Spark jobs.
1930
+
1931
+ @param tmp_req: ListJobRunsRequest
1932
+ @param headers: map
1933
+ @param runtime: runtime options for this request RuntimeOptions
1934
+ @return: ListJobRunsResponse
1935
+ """
1936
+ UtilClient.validate_model(tmp_req)
1937
+ request = emr_serverless_spark_20230808_models.ListJobRunsShrinkRequest()
1938
+ OpenApiUtilClient.convert(tmp_req, request)
1939
+ if not UtilClient.is_unset(tmp_req.end_time):
1940
+ request.end_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.end_time, 'endTime', 'json')
1941
+ if not UtilClient.is_unset(tmp_req.start_time):
1942
+ request.start_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.start_time, 'startTime', 'json')
1943
+ if not UtilClient.is_unset(tmp_req.states):
1944
+ request.states_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.states, 'states', 'json')
1945
+ if not UtilClient.is_unset(tmp_req.tags):
1946
+ request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
1947
+ query = {}
1948
+ if not UtilClient.is_unset(request.creator):
1949
+ query['creator'] = request.creator
1950
+ if not UtilClient.is_unset(request.end_time_shrink):
1951
+ query['endTime'] = request.end_time_shrink
1952
+ if not UtilClient.is_unset(request.job_run_deployment_id):
1953
+ query['jobRunDeploymentId'] = request.job_run_deployment_id
1954
+ if not UtilClient.is_unset(request.job_run_id):
1955
+ query['jobRunId'] = request.job_run_id
1956
+ if not UtilClient.is_unset(request.max_results):
1957
+ query['maxResults'] = request.max_results
1958
+ if not UtilClient.is_unset(request.min_duration):
1959
+ query['minDuration'] = request.min_duration
1960
+ if not UtilClient.is_unset(request.name):
1961
+ query['name'] = request.name
1962
+ if not UtilClient.is_unset(request.next_token):
1963
+ query['nextToken'] = request.next_token
1964
+ if not UtilClient.is_unset(request.region_id):
1965
+ query['regionId'] = request.region_id
1966
+ if not UtilClient.is_unset(request.resource_queue_id):
1967
+ query['resourceQueueId'] = request.resource_queue_id
1968
+ if not UtilClient.is_unset(request.start_time_shrink):
1969
+ query['startTime'] = request.start_time_shrink
1970
+ if not UtilClient.is_unset(request.states_shrink):
1971
+ query['states'] = request.states_shrink
1972
+ if not UtilClient.is_unset(request.tags_shrink):
1973
+ query['tags'] = request.tags_shrink
1974
+ req = open_api_models.OpenApiRequest(
1975
+ headers=headers,
1976
+ query=OpenApiUtilClient.query(query)
1977
+ )
1978
+ params = open_api_models.Params(
1979
+ action='ListJobRuns',
1980
+ version='2023-08-08',
1981
+ protocol='HTTPS',
1982
+ pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/jobRuns',
1983
+ method='GET',
1984
+ auth_type='AK',
1985
+ style='ROA',
1986
+ req_body_type='json',
1987
+ body_type='json'
1988
+ )
1989
+ return TeaCore.from_map(
1990
+ emr_serverless_spark_20230808_models.ListJobRunsResponse(),
1991
+ await self.call_api_async(params, req, runtime)
1992
+ )
1993
+
1994
+ def list_job_runs(
1995
+ self,
1996
+ workspace_id: str,
1997
+ request: emr_serverless_spark_20230808_models.ListJobRunsRequest,
1998
+ ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
1999
+ """
2000
+ @summary Queries a list of Spark jobs.
2001
+
2002
+ @param request: ListJobRunsRequest
2003
+ @return: ListJobRunsResponse
2004
+ """
2005
+ runtime = util_models.RuntimeOptions()
2006
+ headers = {}
2007
+ return self.list_job_runs_with_options(workspace_id, request, headers, runtime)
2008
+
2009
+ async def list_job_runs_async(
2010
+ self,
2011
+ workspace_id: str,
2012
+ request: emr_serverless_spark_20230808_models.ListJobRunsRequest,
2013
+ ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
2014
+ """
2015
+ @summary Queries a list of Spark jobs.
2016
+
2017
+ @param request: ListJobRunsRequest
2018
+ @return: ListJobRunsResponse
963
2019
  """
964
2020
  runtime = util_models.RuntimeOptions()
965
2021
  headers = {}
966
- return await self.grant_role_to_users_with_options_async(request, headers, runtime)
2022
+ return await self.list_job_runs_with_options_async(workspace_id, request, headers, runtime)
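For orientation, a minimal usage sketch of the `list_job_runs` entry point added above follows. It assumes the standard Tea-style setup for this SDK (an `alibabacloud_tea_openapi` Config plus the generated `Client`); the endpoint, credentials, workspace ID, and filter values are placeholders, and the `states` strings are illustrative rather than a confirmed enum.

# Hedged sketch: construct the generated client and list recent Spark job runs.
# Endpoint, credentials, workspace ID and filter values are placeholders.
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_emr_serverless_spark20230808.client import Client
from alibabacloud_emr_serverless_spark20230808 import models as emr_models

config = open_api_models.Config(
    access_key_id='<your-access-key-id>',
    access_key_secret='<your-access-key-secret>',
    # Assumed regional endpoint; substitute the one documented for your region.
    endpoint='emr-serverless-spark.cn-hangzhou.aliyuncs.com',
)
client = Client(config)

# Field names (region_id, states, max_results, ...) mirror the query parameters
# built by list_job_runs_with_options above; the state string is illustrative.
request = emr_models.ListJobRunsRequest(
    region_id='cn-hangzhou',
    states=['Running'],
    max_results=20,
)
response = client.list_job_runs('<workspace-id>', request)
print(response.body)

The later sketches in this section reuse the `client` and `emr_models` names defined here.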
967
2023
 
968
- def list_job_runs_with_options(
2024
+ def list_kyuubi_spark_applications_with_options(
969
2025
  self,
970
2026
  workspace_id: str,
971
- tmp_req: emr_serverless_spark_20230808_models.ListJobRunsRequest,
2027
+ kyuubi_service_id: str,
2028
+ tmp_req: emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsRequest,
972
2029
  headers: Dict[str, str],
973
2030
  runtime: util_models.RuntimeOptions,
974
- ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
2031
+ ) -> emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse:
975
2032
  """
976
- @summary Queries a list of Spark jobs.
2033
+ @summary Queries the applications that are submitted by using a Kyuubi gateway.
977
2034
 
978
- @param tmp_req: ListJobRunsRequest
2035
+ @param tmp_req: ListKyuubiSparkApplicationsRequest
979
2036
  @param headers: map
980
2037
  @param runtime: runtime options for this request RuntimeOptions
981
- @return: ListJobRunsResponse
2038
+ @return: ListKyuubiSparkApplicationsResponse
982
2039
  """
983
2040
  UtilClient.validate_model(tmp_req)
984
- request = emr_serverless_spark_20230808_models.ListJobRunsShrinkRequest()
2041
+ request = emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsShrinkRequest()
985
2042
  OpenApiUtilClient.convert(tmp_req, request)
986
- if not UtilClient.is_unset(tmp_req.end_time):
987
- request.end_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.end_time, 'endTime', 'json')
988
2043
  if not UtilClient.is_unset(tmp_req.start_time):
989
2044
  request.start_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.start_time, 'startTime', 'json')
990
- if not UtilClient.is_unset(tmp_req.states):
991
- request.states_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.states, 'states', 'json')
992
- if not UtilClient.is_unset(tmp_req.tags):
993
- request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
994
2045
  query = {}
995
- if not UtilClient.is_unset(request.creator):
996
- query['creator'] = request.creator
997
- if not UtilClient.is_unset(request.end_time_shrink):
998
- query['endTime'] = request.end_time_shrink
999
- if not UtilClient.is_unset(request.job_run_deployment_id):
1000
- query['jobRunDeploymentId'] = request.job_run_deployment_id
1001
- if not UtilClient.is_unset(request.job_run_id):
1002
- query['jobRunId'] = request.job_run_id
2046
+ if not UtilClient.is_unset(request.application_id):
2047
+ query['applicationId'] = request.application_id
2048
+ if not UtilClient.is_unset(request.application_name):
2049
+ query['applicationName'] = request.application_name
1003
2050
  if not UtilClient.is_unset(request.max_results):
1004
2051
  query['maxResults'] = request.max_results
1005
- if not UtilClient.is_unset(request.name):
1006
- query['name'] = request.name
1007
2052
  if not UtilClient.is_unset(request.next_token):
1008
2053
  query['nextToken'] = request.next_token
1009
- if not UtilClient.is_unset(request.region_id):
1010
- query['regionId'] = request.region_id
1011
- if not UtilClient.is_unset(request.resource_queue_id):
1012
- query['resourceQueueId'] = request.resource_queue_id
1013
2054
  if not UtilClient.is_unset(request.start_time_shrink):
1014
2055
  query['startTime'] = request.start_time_shrink
1015
- if not UtilClient.is_unset(request.states_shrink):
1016
- query['states'] = request.states_shrink
1017
- if not UtilClient.is_unset(request.tags_shrink):
1018
- query['tags'] = request.tags_shrink
1019
2056
  req = open_api_models.OpenApiRequest(
1020
2057
  headers=headers,
1021
2058
  query=OpenApiUtilClient.query(query)
1022
2059
  )
1023
2060
  params = open_api_models.Params(
1024
- action='ListJobRuns',
2061
+ action='ListKyuubiSparkApplications',
1025
2062
  version='2023-08-08',
1026
2063
  protocol='HTTPS',
1027
- pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/jobRuns',
2064
+ pathname=f'/api/v1/kyuubi/{OpenApiUtilClient.get_encode_param(workspace_id)}/{OpenApiUtilClient.get_encode_param(kyuubi_service_id)}/applications',
1028
2065
  method='GET',
1029
2066
  auth_type='AK',
1030
2067
  style='ROA',
@@ -1032,70 +2069,51 @@ class Client(OpenApiClient):
1032
2069
  body_type='json'
1033
2070
  )
1034
2071
  return TeaCore.from_map(
1035
- emr_serverless_spark_20230808_models.ListJobRunsResponse(),
2072
+ emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse(),
1036
2073
  self.call_api(params, req, runtime)
1037
2074
  )
1038
2075
 
1039
- async def list_job_runs_with_options_async(
2076
+ async def list_kyuubi_spark_applications_with_options_async(
1040
2077
  self,
1041
2078
  workspace_id: str,
1042
- tmp_req: emr_serverless_spark_20230808_models.ListJobRunsRequest,
2079
+ kyuubi_service_id: str,
2080
+ tmp_req: emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsRequest,
1043
2081
  headers: Dict[str, str],
1044
2082
  runtime: util_models.RuntimeOptions,
1045
- ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
2083
+ ) -> emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse:
1046
2084
  """
1047
- @summary Queries a list of Spark jobs.
2085
+ @summary Queries the applications that are submitted by using a Kyuubi gateway.
1048
2086
 
1049
- @param tmp_req: ListJobRunsRequest
2087
+ @param tmp_req: ListKyuubiSparkApplicationsRequest
1050
2088
  @param headers: map
1051
2089
  @param runtime: runtime options for this request RuntimeOptions
1052
- @return: ListJobRunsResponse
2090
+ @return: ListKyuubiSparkApplicationsResponse
1053
2091
  """
1054
2092
  UtilClient.validate_model(tmp_req)
1055
- request = emr_serverless_spark_20230808_models.ListJobRunsShrinkRequest()
2093
+ request = emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsShrinkRequest()
1056
2094
  OpenApiUtilClient.convert(tmp_req, request)
1057
- if not UtilClient.is_unset(tmp_req.end_time):
1058
- request.end_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.end_time, 'endTime', 'json')
1059
2095
  if not UtilClient.is_unset(tmp_req.start_time):
1060
2096
  request.start_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.start_time, 'startTime', 'json')
1061
- if not UtilClient.is_unset(tmp_req.states):
1062
- request.states_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.states, 'states', 'json')
1063
- if not UtilClient.is_unset(tmp_req.tags):
1064
- request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
1065
2097
  query = {}
1066
- if not UtilClient.is_unset(request.creator):
1067
- query['creator'] = request.creator
1068
- if not UtilClient.is_unset(request.end_time_shrink):
1069
- query['endTime'] = request.end_time_shrink
1070
- if not UtilClient.is_unset(request.job_run_deployment_id):
1071
- query['jobRunDeploymentId'] = request.job_run_deployment_id
1072
- if not UtilClient.is_unset(request.job_run_id):
1073
- query['jobRunId'] = request.job_run_id
2098
+ if not UtilClient.is_unset(request.application_id):
2099
+ query['applicationId'] = request.application_id
2100
+ if not UtilClient.is_unset(request.application_name):
2101
+ query['applicationName'] = request.application_name
1074
2102
  if not UtilClient.is_unset(request.max_results):
1075
2103
  query['maxResults'] = request.max_results
1076
- if not UtilClient.is_unset(request.name):
1077
- query['name'] = request.name
1078
2104
  if not UtilClient.is_unset(request.next_token):
1079
2105
  query['nextToken'] = request.next_token
1080
- if not UtilClient.is_unset(request.region_id):
1081
- query['regionId'] = request.region_id
1082
- if not UtilClient.is_unset(request.resource_queue_id):
1083
- query['resourceQueueId'] = request.resource_queue_id
1084
2106
  if not UtilClient.is_unset(request.start_time_shrink):
1085
2107
  query['startTime'] = request.start_time_shrink
1086
- if not UtilClient.is_unset(request.states_shrink):
1087
- query['states'] = request.states_shrink
1088
- if not UtilClient.is_unset(request.tags_shrink):
1089
- query['tags'] = request.tags_shrink
1090
2108
  req = open_api_models.OpenApiRequest(
1091
2109
  headers=headers,
1092
2110
  query=OpenApiUtilClient.query(query)
1093
2111
  )
1094
2112
  params = open_api_models.Params(
1095
- action='ListJobRuns',
2113
+ action='ListKyuubiSparkApplications',
1096
2114
  version='2023-08-08',
1097
2115
  protocol='HTTPS',
1098
- pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/jobRuns',
2116
+ pathname=f'/api/v1/kyuubi/{OpenApiUtilClient.get_encode_param(workspace_id)}/{OpenApiUtilClient.get_encode_param(kyuubi_service_id)}/applications',
1099
2117
  method='GET',
1100
2118
  auth_type='AK',
1101
2119
  style='ROA',
@@ -1103,39 +2121,41 @@ class Client(OpenApiClient):
1103
2121
  body_type='json'
1104
2122
  )
1105
2123
  return TeaCore.from_map(
1106
- emr_serverless_spark_20230808_models.ListJobRunsResponse(),
2124
+ emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse(),
1107
2125
  await self.call_api_async(params, req, runtime)
1108
2126
  )
1109
2127
 
1110
- def list_job_runs(
2128
+ def list_kyuubi_spark_applications(
1111
2129
  self,
1112
2130
  workspace_id: str,
1113
- request: emr_serverless_spark_20230808_models.ListJobRunsRequest,
1114
- ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
2131
+ kyuubi_service_id: str,
2132
+ request: emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsRequest,
2133
+ ) -> emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse:
1115
2134
  """
1116
- @summary Queries a list of Spark jobs.
2135
+ @summary Queries the applications that are submitted by using a Kyuubi gateway.
1117
2136
 
1118
- @param request: ListJobRunsRequest
1119
- @return: ListJobRunsResponse
2137
+ @param request: ListKyuubiSparkApplicationsRequest
2138
+ @return: ListKyuubiSparkApplicationsResponse
1120
2139
  """
1121
2140
  runtime = util_models.RuntimeOptions()
1122
2141
  headers = {}
1123
- return self.list_job_runs_with_options(workspace_id, request, headers, runtime)
2142
+ return self.list_kyuubi_spark_applications_with_options(workspace_id, kyuubi_service_id, request, headers, runtime)
1124
2143
 
1125
- async def list_job_runs_async(
2144
+ async def list_kyuubi_spark_applications_async(
1126
2145
  self,
1127
2146
  workspace_id: str,
1128
- request: emr_serverless_spark_20230808_models.ListJobRunsRequest,
1129
- ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
2147
+ kyuubi_service_id: str,
2148
+ request: emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsRequest,
2149
+ ) -> emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsResponse:
1130
2150
  """
1131
- @summary Queries a list of Spark jobs.
2151
+ @summary Queries the applications that are submitted by using a Kyuubi gateway.
1132
2152
 
1133
- @param request: ListJobRunsRequest
1134
- @return: ListJobRunsResponse
2153
+ @param request: ListKyuubiSparkApplicationsRequest
2154
+ @return: ListKyuubiSparkApplicationsResponse
1135
2155
  """
1136
2156
  runtime = util_models.RuntimeOptions()
1137
2157
  headers = {}
1138
- return await self.list_job_runs_with_options_async(workspace_id, request, headers, runtime)
2158
+ return await self.list_kyuubi_spark_applications_with_options_async(workspace_id, kyuubi_service_id, request, headers, runtime)
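The new Kyuubi listing call takes both a workspace ID and a Kyuubi service ID as path parameters, as the route above shows. A hedged sketch, reusing `client` and `emr_models` from the earlier sketch; the IDs and the application-name filter are placeholders.

# Hedged sketch: list Spark applications submitted through a Kyuubi gateway.
# Reuses `client` and `emr_models` from the earlier sketch; IDs are placeholders.
request = emr_models.ListKyuubiSparkApplicationsRequest(
    application_name='my-kyuubi-app',  # optional filter, per the query built above
    max_results=20,
)
response = client.list_kyuubi_spark_applications(
    '<workspace-id>',
    '<kyuubi-service-id>',
    request,
)
print(response.body)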
1139
2159
 
1140
2160
  def list_log_contents_with_options(
1141
2161
  self,
@@ -1145,7 +2165,7 @@ class Client(OpenApiClient):
1145
2165
  runtime: util_models.RuntimeOptions,
1146
2166
  ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
1147
2167
  """
1148
- @summary Obtains the log content.
2168
+ @summary Get Log Content
1149
2169
 
1150
2170
  @param request: ListLogContentsRequest
1151
2171
  @param headers: map
@@ -1190,7 +2210,7 @@ class Client(OpenApiClient):
1190
2210
  runtime: util_models.RuntimeOptions,
1191
2211
  ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
1192
2212
  """
1193
- @summary Obtains the log content.
2213
+ @summary Get Log Content
1194
2214
 
1195
2215
  @param request: ListLogContentsRequest
1196
2216
  @param headers: map
@@ -1233,7 +2253,7 @@ class Client(OpenApiClient):
1233
2253
  request: emr_serverless_spark_20230808_models.ListLogContentsRequest,
1234
2254
  ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
1235
2255
  """
1236
- @summary Obtains the log content.
2256
+ @summary Get Log Content
1237
2257
 
1238
2258
  @param request: ListLogContentsRequest
1239
2259
  @return: ListLogContentsResponse
@@ -1248,7 +2268,7 @@ class Client(OpenApiClient):
1248
2268
  request: emr_serverless_spark_20230808_models.ListLogContentsRequest,
1249
2269
  ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
1250
2270
  """
1251
- @summary Obtains the log content.
2271
+ @summary Get Log Content
1252
2272
 
1253
2273
  @param request: ListLogContentsRequest
1254
2274
  @return: ListLogContentsResponse
@@ -1281,6 +2301,8 @@ class Client(OpenApiClient):
1281
2301
  query['releaseVersion'] = request.release_version
1282
2302
  if not UtilClient.is_unset(request.release_version_status):
1283
2303
  query['releaseVersionStatus'] = request.release_version_status
2304
+ if not UtilClient.is_unset(request.service_filter):
2305
+ query['serviceFilter'] = request.service_filter
1284
2306
  if not UtilClient.is_unset(request.workspace_id):
1285
2307
  query['workspaceId'] = request.workspace_id
1286
2308
  req = open_api_models.OpenApiRequest(
@@ -1327,6 +2349,8 @@ class Client(OpenApiClient):
1327
2349
  query['releaseVersion'] = request.release_version
1328
2350
  if not UtilClient.is_unset(request.release_version_status):
1329
2351
  query['releaseVersionStatus'] = request.release_version_status
2352
+ if not UtilClient.is_unset(request.service_filter):
2353
+ query['serviceFilter'] = request.service_filter
1330
2354
  if not UtilClient.is_unset(request.workspace_id):
1331
2355
  query['workspaceId'] = request.workspace_id
1332
2356
  req = open_api_models.OpenApiRequest(
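The only change to the release-version listing in this hunk is the new `serviceFilter` query parameter. A hedged sketch of passing it through the request model; the convenience method name and its signature are assumed from the SDK's usual naming, and the filter value is a placeholder rather than a documented enum.

# Hedged sketch: the new service_filter field is forwarded as the serviceFilter
# query parameter. Method name and signature assumed from the SDK's conventions.
request = emr_models.ListReleaseVersionsRequest(
    workspace_id='<workspace-id>',
    service_filter='<service-filter>',  # placeholder value
)
response = client.list_release_versions(request)
print(response.body)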
@@ -1619,19 +2643,23 @@ class Client(OpenApiClient):
1619
2643
 
1620
2644
  def list_workspaces_with_options(
1621
2645
  self,
1622
- request: emr_serverless_spark_20230808_models.ListWorkspacesRequest,
2646
+ tmp_req: emr_serverless_spark_20230808_models.ListWorkspacesRequest,
1623
2647
  headers: Dict[str, str],
1624
2648
  runtime: util_models.RuntimeOptions,
1625
2649
  ) -> emr_serverless_spark_20230808_models.ListWorkspacesResponse:
1626
2650
  """
1627
2651
  @summary Queries a list of workspaces.
1628
2652
 
1629
- @param request: ListWorkspacesRequest
2653
+ @param tmp_req: ListWorkspacesRequest
1630
2654
  @param headers: map
1631
2655
  @param runtime: runtime options for this request RuntimeOptions
1632
2656
  @return: ListWorkspacesResponse
1633
2657
  """
1634
- UtilClient.validate_model(request)
2658
+ UtilClient.validate_model(tmp_req)
2659
+ request = emr_serverless_spark_20230808_models.ListWorkspacesShrinkRequest()
2660
+ OpenApiUtilClient.convert(tmp_req, request)
2661
+ if not UtilClient.is_unset(tmp_req.tag):
2662
+ request.tag_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tag, 'tag', 'json')
1635
2663
  query = {}
1636
2664
  if not UtilClient.is_unset(request.max_results):
1637
2665
  query['maxResults'] = request.max_results
@@ -1643,6 +2671,8 @@ class Client(OpenApiClient):
1643
2671
  query['regionId'] = request.region_id
1644
2672
  if not UtilClient.is_unset(request.state):
1645
2673
  query['state'] = request.state
2674
+ if not UtilClient.is_unset(request.tag_shrink):
2675
+ query['tag'] = request.tag_shrink
1646
2676
  req = open_api_models.OpenApiRequest(
1647
2677
  headers=headers,
1648
2678
  query=OpenApiUtilClient.query(query)
@@ -1665,19 +2695,23 @@ class Client(OpenApiClient):
1665
2695
 
1666
2696
  async def list_workspaces_with_options_async(
1667
2697
  self,
1668
- request: emr_serverless_spark_20230808_models.ListWorkspacesRequest,
2698
+ tmp_req: emr_serverless_spark_20230808_models.ListWorkspacesRequest,
1669
2699
  headers: Dict[str, str],
1670
2700
  runtime: util_models.RuntimeOptions,
1671
2701
  ) -> emr_serverless_spark_20230808_models.ListWorkspacesResponse:
1672
2702
  """
1673
2703
  @summary Queries a list of workspaces.
1674
2704
 
1675
- @param request: ListWorkspacesRequest
2705
+ @param tmp_req: ListWorkspacesRequest
1676
2706
  @param headers: map
1677
2707
  @param runtime: runtime options for this request RuntimeOptions
1678
2708
  @return: ListWorkspacesResponse
1679
2709
  """
1680
- UtilClient.validate_model(request)
2710
+ UtilClient.validate_model(tmp_req)
2711
+ request = emr_serverless_spark_20230808_models.ListWorkspacesShrinkRequest()
2712
+ OpenApiUtilClient.convert(tmp_req, request)
2713
+ if not UtilClient.is_unset(tmp_req.tag):
2714
+ request.tag_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tag, 'tag', 'json')
1681
2715
  query = {}
1682
2716
  if not UtilClient.is_unset(request.max_results):
1683
2717
  query['maxResults'] = request.max_results
@@ -1689,6 +2723,8 @@ class Client(OpenApiClient):
1689
2723
  query['regionId'] = request.region_id
1690
2724
  if not UtilClient.is_unset(request.state):
1691
2725
  query['state'] = request.state
2726
+ if not UtilClient.is_unset(request.tag_shrink):
2727
+ query['tag'] = request.tag_shrink
1692
2728
  req = open_api_models.OpenApiRequest(
1693
2729
  headers=headers,
1694
2730
  query=OpenApiUtilClient.query(query)
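ListWorkspaces now accepts an optional `tag` list that the shrink step above JSON-encodes into a single `tag` query parameter. A hedged sketch using only the fields visible in this hunk; the tag entries are model objects whose element class is not shown in this diff, so they are omitted.

# Hedged sketch: list workspaces using the fields visible in this hunk.
# The new `tag` field takes a list of tag model objects that the SDK
# JSON-encodes into the `tag` query parameter; the element class is not
# shown in this diff, so it is left out here.
request = emr_models.ListWorkspacesRequest(
    region_id='cn-hangzhou',
    state='<workspace-state>',  # placeholder value
    max_results=20,
)
response = client.list_workspaces(request)
print(response.body)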
@@ -1897,6 +2933,154 @@ class Client(OpenApiClient):
1897
2933
  headers = {}
1898
2934
  return await self.start_job_run_with_options_async(workspace_id, request, headers, runtime)
1899
2935
 
2936
+ def start_process_instance_with_options(
2937
+ self,
2938
+ biz_id: str,
2939
+ request: emr_serverless_spark_20230808_models.StartProcessInstanceRequest,
2940
+ headers: Dict[str, str],
2941
+ runtime: util_models.RuntimeOptions,
2942
+ ) -> emr_serverless_spark_20230808_models.StartProcessInstanceResponse:
2943
+ """
2944
+ @summary Manually runs a workflow.
2945
+
2946
+ @param request: StartProcessInstanceRequest
2947
+ @param headers: map
2948
+ @param runtime: runtime options for this request RuntimeOptions
2949
+ @return: StartProcessInstanceResponse
2950
+ """
2951
+ UtilClient.validate_model(request)
2952
+ query = {}
2953
+ if not UtilClient.is_unset(request.action):
2954
+ query['action'] = request.action
2955
+ if not UtilClient.is_unset(request.comments):
2956
+ query['comments'] = request.comments
2957
+ if not UtilClient.is_unset(request.email):
2958
+ query['email'] = request.email
2959
+ if not UtilClient.is_unset(request.interval):
2960
+ query['interval'] = request.interval
2961
+ if not UtilClient.is_unset(request.is_prod):
2962
+ query['isProd'] = request.is_prod
2963
+ if not UtilClient.is_unset(request.process_definition_code):
2964
+ query['processDefinitionCode'] = request.process_definition_code
2965
+ if not UtilClient.is_unset(request.product_namespace):
2966
+ query['productNamespace'] = request.product_namespace
2967
+ if not UtilClient.is_unset(request.region_id):
2968
+ query['regionId'] = request.region_id
2969
+ if not UtilClient.is_unset(request.runtime_queue):
2970
+ query['runtimeQueue'] = request.runtime_queue
2971
+ if not UtilClient.is_unset(request.version_hash_code):
2972
+ query['versionHashCode'] = request.version_hash_code
2973
+ if not UtilClient.is_unset(request.version_number):
2974
+ query['versionNumber'] = request.version_number
2975
+ req = open_api_models.OpenApiRequest(
2976
+ headers=headers,
2977
+ query=OpenApiUtilClient.query(query)
2978
+ )
2979
+ params = open_api_models.Params(
2980
+ action='StartProcessInstance',
2981
+ version='2023-08-08',
2982
+ protocol='HTTPS',
2983
+ pathname=f'/dolphinscheduler/projects/{OpenApiUtilClient.get_encode_param(biz_id)}/executors/start-process-instance',
2984
+ method='POST',
2985
+ auth_type='AK',
2986
+ style='ROA',
2987
+ req_body_type='json',
2988
+ body_type='json'
2989
+ )
2990
+ return TeaCore.from_map(
2991
+ emr_serverless_spark_20230808_models.StartProcessInstanceResponse(),
2992
+ self.call_api(params, req, runtime)
2993
+ )
2994
+
2995
+ async def start_process_instance_with_options_async(
2996
+ self,
2997
+ biz_id: str,
2998
+ request: emr_serverless_spark_20230808_models.StartProcessInstanceRequest,
2999
+ headers: Dict[str, str],
3000
+ runtime: util_models.RuntimeOptions,
3001
+ ) -> emr_serverless_spark_20230808_models.StartProcessInstanceResponse:
3002
+ """
3003
+ @summary Manually runs a workflow.
3004
+
3005
+ @param request: StartProcessInstanceRequest
3006
+ @param headers: map
3007
+ @param runtime: runtime options for this request RuntimeOptions
3008
+ @return: StartProcessInstanceResponse
3009
+ """
3010
+ UtilClient.validate_model(request)
3011
+ query = {}
3012
+ if not UtilClient.is_unset(request.action):
3013
+ query['action'] = request.action
3014
+ if not UtilClient.is_unset(request.comments):
3015
+ query['comments'] = request.comments
3016
+ if not UtilClient.is_unset(request.email):
3017
+ query['email'] = request.email
3018
+ if not UtilClient.is_unset(request.interval):
3019
+ query['interval'] = request.interval
3020
+ if not UtilClient.is_unset(request.is_prod):
3021
+ query['isProd'] = request.is_prod
3022
+ if not UtilClient.is_unset(request.process_definition_code):
3023
+ query['processDefinitionCode'] = request.process_definition_code
3024
+ if not UtilClient.is_unset(request.product_namespace):
3025
+ query['productNamespace'] = request.product_namespace
3026
+ if not UtilClient.is_unset(request.region_id):
3027
+ query['regionId'] = request.region_id
3028
+ if not UtilClient.is_unset(request.runtime_queue):
3029
+ query['runtimeQueue'] = request.runtime_queue
3030
+ if not UtilClient.is_unset(request.version_hash_code):
3031
+ query['versionHashCode'] = request.version_hash_code
3032
+ if not UtilClient.is_unset(request.version_number):
3033
+ query['versionNumber'] = request.version_number
3034
+ req = open_api_models.OpenApiRequest(
3035
+ headers=headers,
3036
+ query=OpenApiUtilClient.query(query)
3037
+ )
3038
+ params = open_api_models.Params(
3039
+ action='StartProcessInstance',
3040
+ version='2023-08-08',
3041
+ protocol='HTTPS',
3042
+ pathname=f'/dolphinscheduler/projects/{OpenApiUtilClient.get_encode_param(biz_id)}/executors/start-process-instance',
3043
+ method='POST',
3044
+ auth_type='AK',
3045
+ style='ROA',
3046
+ req_body_type='json',
3047
+ body_type='json'
3048
+ )
3049
+ return TeaCore.from_map(
3050
+ emr_serverless_spark_20230808_models.StartProcessInstanceResponse(),
3051
+ await self.call_api_async(params, req, runtime)
3052
+ )
3053
+
3054
+ def start_process_instance(
3055
+ self,
3056
+ biz_id: str,
3057
+ request: emr_serverless_spark_20230808_models.StartProcessInstanceRequest,
3058
+ ) -> emr_serverless_spark_20230808_models.StartProcessInstanceResponse:
3059
+ """
3060
+ @summary Manually runs a workflow.
3061
+
3062
+ @param request: StartProcessInstanceRequest
3063
+ @return: StartProcessInstanceResponse
3064
+ """
3065
+ runtime = util_models.RuntimeOptions()
3066
+ headers = {}
3067
+ return self.start_process_instance_with_options(biz_id, request, headers, runtime)
3068
+
3069
+ async def start_process_instance_async(
3070
+ self,
3071
+ biz_id: str,
3072
+ request: emr_serverless_spark_20230808_models.StartProcessInstanceRequest,
3073
+ ) -> emr_serverless_spark_20230808_models.StartProcessInstanceResponse:
3074
+ """
3075
+ @summary Manually runs a workflow.
3076
+
3077
+ @param request: StartProcessInstanceRequest
3078
+ @return: StartProcessInstanceResponse
3079
+ """
3080
+ runtime = util_models.RuntimeOptions()
3081
+ headers = {}
3082
+ return await self.start_process_instance_with_options_async(biz_id, request, headers, runtime)
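A hedged sketch of triggering a workflow run through the new StartProcessInstance wrapper. `biz_id` is the path parameter in the DolphinScheduler-style route above (treated here as the scheduler project identifier); all values are placeholders, and the field types are inferred rather than confirmed by this diff.

# Hedged sketch: manually trigger a workflow (process instance) run.
# biz_id is the path parameter from the route above; all values are placeholders.
request = emr_models.StartProcessInstanceRequest(
    region_id='cn-hangzhou',
    process_definition_code='<process-definition-code>',  # placeholder; type not shown in this diff
    is_prod=True,                       # assumed boolean flag for the production environment
    runtime_queue='<resource-queue>',   # queue to run the workflow on
)
response = client.start_process_instance('<biz-id>', request)
print(response.body)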
3083
+
1900
3084
  def start_session_cluster_with_options(
1901
3085
  self,
1902
3086
  workspace_id: str,
@@ -2248,3 +3432,207 @@ class Client(OpenApiClient):
2248
3432
  runtime = util_models.RuntimeOptions()
2249
3433
  headers = {}
2250
3434
  return await self.terminate_sql_statement_with_options_async(workspace_id, statement_id, request, headers, runtime)
3435
+
3436
+ def update_process_definition_with_schedule_with_options(
3437
+ self,
3438
+ biz_id: str,
3439
+ code: str,
3440
+ tmp_req: emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleRequest,
3441
+ headers: Dict[str, str],
3442
+ runtime: util_models.RuntimeOptions,
3443
+ ) -> emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleResponse:
3444
+ """
3445
+ @summary Updates the workflow and time-based scheduling configurations.
3446
+
3447
+ @param tmp_req: UpdateProcessDefinitionWithScheduleRequest
3448
+ @param headers: map
3449
+ @param runtime: runtime options for this request RuntimeOptions
3450
+ @return: UpdateProcessDefinitionWithScheduleResponse
3451
+ """
3452
+ UtilClient.validate_model(tmp_req)
3453
+ request = emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleShrinkRequest()
3454
+ OpenApiUtilClient.convert(tmp_req, request)
3455
+ if not UtilClient.is_unset(tmp_req.global_params):
3456
+ request.global_params_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.global_params, 'globalParams', 'json')
3457
+ if not UtilClient.is_unset(tmp_req.schedule):
3458
+ request.schedule_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.schedule, 'schedule', 'json')
3459
+ if not UtilClient.is_unset(tmp_req.tags):
3460
+ request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
3461
+ if not UtilClient.is_unset(tmp_req.task_definition_json):
3462
+ request.task_definition_json_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.task_definition_json, 'taskDefinitionJson', 'json')
3463
+ if not UtilClient.is_unset(tmp_req.task_relation_json):
3464
+ request.task_relation_json_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.task_relation_json, 'taskRelationJson', 'json')
3465
+ query = {}
3466
+ if not UtilClient.is_unset(request.alert_email_address):
3467
+ query['alertEmailAddress'] = request.alert_email_address
3468
+ if not UtilClient.is_unset(request.description):
3469
+ query['description'] = request.description
3470
+ if not UtilClient.is_unset(request.execution_type):
3471
+ query['executionType'] = request.execution_type
3472
+ if not UtilClient.is_unset(request.global_params_shrink):
3473
+ query['globalParams'] = request.global_params_shrink
3474
+ if not UtilClient.is_unset(request.name):
3475
+ query['name'] = request.name
3476
+ if not UtilClient.is_unset(request.product_namespace):
3477
+ query['productNamespace'] = request.product_namespace
3478
+ if not UtilClient.is_unset(request.publish):
3479
+ query['publish'] = request.publish
3480
+ if not UtilClient.is_unset(request.region_id):
3481
+ query['regionId'] = request.region_id
3482
+ if not UtilClient.is_unset(request.release_state):
3483
+ query['releaseState'] = request.release_state
3484
+ if not UtilClient.is_unset(request.resource_queue):
3485
+ query['resourceQueue'] = request.resource_queue
3486
+ if not UtilClient.is_unset(request.retry_times):
3487
+ query['retryTimes'] = request.retry_times
3488
+ if not UtilClient.is_unset(request.run_as):
3489
+ query['runAs'] = request.run_as
3490
+ if not UtilClient.is_unset(request.schedule_shrink):
3491
+ query['schedule'] = request.schedule_shrink
3492
+ if not UtilClient.is_unset(request.tags_shrink):
3493
+ query['tags'] = request.tags_shrink
3494
+ if not UtilClient.is_unset(request.task_definition_json_shrink):
3495
+ query['taskDefinitionJson'] = request.task_definition_json_shrink
3496
+ if not UtilClient.is_unset(request.task_parallelism):
3497
+ query['taskParallelism'] = request.task_parallelism
3498
+ if not UtilClient.is_unset(request.task_relation_json_shrink):
3499
+ query['taskRelationJson'] = request.task_relation_json_shrink
3500
+ if not UtilClient.is_unset(request.timeout):
3501
+ query['timeout'] = request.timeout
3502
+ req = open_api_models.OpenApiRequest(
3503
+ headers=headers,
3504
+ query=OpenApiUtilClient.query(query)
3505
+ )
3506
+ params = open_api_models.Params(
3507
+ action='UpdateProcessDefinitionWithSchedule',
3508
+ version='2023-08-08',
3509
+ protocol='HTTPS',
3510
+ pathname=f'/dolphinscheduler/projects/{OpenApiUtilClient.get_encode_param(biz_id)}/process-definition/{OpenApiUtilClient.get_encode_param(code)}',
3511
+ method='PUT',
3512
+ auth_type='AK',
3513
+ style='ROA',
3514
+ req_body_type='json',
3515
+ body_type='json'
3516
+ )
3517
+ return TeaCore.from_map(
3518
+ emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleResponse(),
3519
+ self.call_api(params, req, runtime)
3520
+ )
3521
+
3522
+ async def update_process_definition_with_schedule_with_options_async(
3523
+ self,
3524
+ biz_id: str,
3525
+ code: str,
3526
+ tmp_req: emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleRequest,
3527
+ headers: Dict[str, str],
3528
+ runtime: util_models.RuntimeOptions,
3529
+ ) -> emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleResponse:
3530
+ """
3531
+ @summary Updates the workflow and time-based scheduling configurations.
3532
+
3533
+ @param tmp_req: UpdateProcessDefinitionWithScheduleRequest
3534
+ @param headers: map
3535
+ @param runtime: runtime options for this request RuntimeOptions
3536
+ @return: UpdateProcessDefinitionWithScheduleResponse
3537
+ """
3538
+ UtilClient.validate_model(tmp_req)
3539
+ request = emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleShrinkRequest()
3540
+ OpenApiUtilClient.convert(tmp_req, request)
3541
+ if not UtilClient.is_unset(tmp_req.global_params):
3542
+ request.global_params_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.global_params, 'globalParams', 'json')
3543
+ if not UtilClient.is_unset(tmp_req.schedule):
3544
+ request.schedule_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.schedule, 'schedule', 'json')
3545
+ if not UtilClient.is_unset(tmp_req.tags):
3546
+ request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
3547
+ if not UtilClient.is_unset(tmp_req.task_definition_json):
3548
+ request.task_definition_json_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.task_definition_json, 'taskDefinitionJson', 'json')
3549
+ if not UtilClient.is_unset(tmp_req.task_relation_json):
3550
+ request.task_relation_json_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.task_relation_json, 'taskRelationJson', 'json')
3551
+ query = {}
3552
+ if not UtilClient.is_unset(request.alert_email_address):
3553
+ query['alertEmailAddress'] = request.alert_email_address
3554
+ if not UtilClient.is_unset(request.description):
3555
+ query['description'] = request.description
3556
+ if not UtilClient.is_unset(request.execution_type):
3557
+ query['executionType'] = request.execution_type
3558
+ if not UtilClient.is_unset(request.global_params_shrink):
3559
+ query['globalParams'] = request.global_params_shrink
3560
+ if not UtilClient.is_unset(request.name):
3561
+ query['name'] = request.name
3562
+ if not UtilClient.is_unset(request.product_namespace):
3563
+ query['productNamespace'] = request.product_namespace
3564
+ if not UtilClient.is_unset(request.publish):
3565
+ query['publish'] = request.publish
3566
+ if not UtilClient.is_unset(request.region_id):
3567
+ query['regionId'] = request.region_id
3568
+ if not UtilClient.is_unset(request.release_state):
3569
+ query['releaseState'] = request.release_state
3570
+ if not UtilClient.is_unset(request.resource_queue):
3571
+ query['resourceQueue'] = request.resource_queue
3572
+ if not UtilClient.is_unset(request.retry_times):
3573
+ query['retryTimes'] = request.retry_times
3574
+ if not UtilClient.is_unset(request.run_as):
3575
+ query['runAs'] = request.run_as
3576
+ if not UtilClient.is_unset(request.schedule_shrink):
3577
+ query['schedule'] = request.schedule_shrink
3578
+ if not UtilClient.is_unset(request.tags_shrink):
3579
+ query['tags'] = request.tags_shrink
3580
+ if not UtilClient.is_unset(request.task_definition_json_shrink):
3581
+ query['taskDefinitionJson'] = request.task_definition_json_shrink
3582
+ if not UtilClient.is_unset(request.task_parallelism):
3583
+ query['taskParallelism'] = request.task_parallelism
3584
+ if not UtilClient.is_unset(request.task_relation_json_shrink):
3585
+ query['taskRelationJson'] = request.task_relation_json_shrink
3586
+ if not UtilClient.is_unset(request.timeout):
3587
+ query['timeout'] = request.timeout
3588
+ req = open_api_models.OpenApiRequest(
3589
+ headers=headers,
3590
+ query=OpenApiUtilClient.query(query)
3591
+ )
3592
+ params = open_api_models.Params(
3593
+ action='UpdateProcessDefinitionWithSchedule',
3594
+ version='2023-08-08',
3595
+ protocol='HTTPS',
3596
+ pathname=f'/dolphinscheduler/projects/{OpenApiUtilClient.get_encode_param(biz_id)}/process-definition/{OpenApiUtilClient.get_encode_param(code)}',
3597
+ method='PUT',
3598
+ auth_type='AK',
3599
+ style='ROA',
3600
+ req_body_type='json',
3601
+ body_type='json'
3602
+ )
3603
+ return TeaCore.from_map(
3604
+ emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleResponse(),
3605
+ await self.call_api_async(params, req, runtime)
3606
+ )
3607
+
3608
+ def update_process_definition_with_schedule(
3609
+ self,
3610
+ biz_id: str,
3611
+ code: str,
3612
+ request: emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleRequest,
3613
+ ) -> emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleResponse:
3614
+ """
3615
+ @summary Updates the workflow and time-based scheduling configurations.
3616
+
3617
+ @param request: UpdateProcessDefinitionWithScheduleRequest
3618
+ @return: UpdateProcessDefinitionWithScheduleResponse
3619
+ """
3620
+ runtime = util_models.RuntimeOptions()
3621
+ headers = {}
3622
+ return self.update_process_definition_with_schedule_with_options(biz_id, code, request, headers, runtime)
3623
+
3624
+ async def update_process_definition_with_schedule_async(
3625
+ self,
3626
+ biz_id: str,
3627
+ code: str,
3628
+ request: emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleRequest,
3629
+ ) -> emr_serverless_spark_20230808_models.UpdateProcessDefinitionWithScheduleResponse:
3630
+ """
3631
+ @summary Updates the workflow and time-based scheduling configurations.
3632
+
3633
+ @param request: UpdateProcessDefinitionWithScheduleRequest
3634
+ @return: UpdateProcessDefinitionWithScheduleResponse
3635
+ """
3636
+ runtime = util_models.RuntimeOptions()
3637
+ headers = {}
3638
+ return await self.update_process_definition_with_schedule_with_options_async(biz_id, code, request, headers, runtime)
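Finally, a hedged sketch of the new update call, which addresses a workflow by project (`biz_id`) and definition `code` and pushes the definition and its schedule in one request. Only scalar fields confirmed by the query-building code above are set; the allowed values for `execution_type` and `release_state` are placeholders, and the task definition, task relations, and schedule are lists of model objects that the SDK JSON-encodes, whose element classes are not shown in this diff.

# Hedged sketch: update a workflow definition together with its schedule.
# Only fields visible in the query-building code above are set; placeholders throughout.
request = emr_models.UpdateProcessDefinitionWithScheduleRequest(
    name='nightly-etl',
    description='updated via the SDK',
    execution_type='PARALLEL',    # placeholder; allowed values not shown in this diff
    release_state='ONLINE',       # placeholder; allowed values not shown in this diff
    retry_times=1,
    timeout=120,
    region_id='cn-hangzhou',
    # task_definition_json, task_relation_json and schedule take lists of model
    # objects that the SDK serializes to JSON query parameters (see the shrink
    # logic above); their element classes are not shown here, so they are omitted.
)
response = client.update_process_definition_with_schedule(
    '<biz-id>',
    '<process-definition-code>',
    request,
)
print(response.body)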