alibabacloud-emr-serverless-spark20230808 1.0.0__tar.gz → 1.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of alibabacloud-emr-serverless-spark20230808 might be problematic. See the registry's advisory page for this release for more details.

Files changed (17)
  1. alibabacloud_emr-serverless-spark20230808-1.0.1/ChangeLog.md +3 -0
  2. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/PKG-INFO +1 -1
  3. alibabacloud_emr-serverless-spark20230808-1.0.1/alibabacloud_emr_serverless_spark20230808/__init__.py +1 -0
  4. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/alibabacloud_emr_serverless_spark20230808/client.py +112 -0
  5. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/alibabacloud_emr_serverless_spark20230808/models.py +159 -0
  6. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/alibabacloud_emr_serverless_spark20230808.egg-info/PKG-INFO +1 -1
  7. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/alibabacloud_emr_serverless_spark20230808.egg-info/SOURCES.txt +1 -0
  8. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/alibabacloud_emr_serverless_spark20230808.egg-info/requires.txt +2 -2
  9. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/setup.py +3 -3
  10. alibabacloud_emr-serverless-spark20230808-1.0.0/alibabacloud_emr_serverless_spark20230808/__init__.py +0 -1
  11. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/LICENSE +0 -0
  12. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/MANIFEST.in +0 -0
  13. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/README-CN.md +0 -0
  14. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/README.md +0 -0
  15. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/alibabacloud_emr_serverless_spark20230808.egg-info/dependency_links.txt +0 -0
  16. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/alibabacloud_emr_serverless_spark20230808.egg-info/top_level.txt +0 -0
  17. {alibabacloud_emr-serverless-spark20230808-1.0.0 → alibabacloud_emr-serverless-spark20230808-1.0.1}/setup.cfg +0 -0
@@ -0,0 +1,3 @@
1
+ 2024-04-16 Version: 1.0.0
2
+ - Generated python 2023-08-08 for emr-serverless-spark.
3
+
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: alibabacloud_emr-serverless-spark20230808
3
- Version: 1.0.0
3
+ Version: 1.0.1
4
4
  Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
5
5
  Home-page: https://github.com/aliyun/alibabacloud-python-sdk
6
6
  Author: Alibaba Cloud SDK
@@ -49,6 +49,14 @@ class Client(OpenApiClient):
49
49
  headers: Dict[str, str],
50
50
  runtime: util_models.RuntimeOptions,
51
51
  ) -> emr_serverless_spark_20230808_models.CancelJobRunResponse:
52
+ """
53
+ @summary 取消jobRun作业
54
+
55
+ @param request: CancelJobRunRequest
56
+ @param headers: map
57
+ @param runtime: runtime options for this request RuntimeOptions
58
+ @return: CancelJobRunResponse
59
+ """
52
60
  UtilClient.validate_model(request)
53
61
  query = {}
54
62
  if not UtilClient.is_unset(request.region_id):
@@ -81,6 +89,14 @@ class Client(OpenApiClient):
81
89
  headers: Dict[str, str],
82
90
  runtime: util_models.RuntimeOptions,
83
91
  ) -> emr_serverless_spark_20230808_models.CancelJobRunResponse:
92
+ """
93
+ @summary 取消jobRun作业
94
+
95
+ @param request: CancelJobRunRequest
96
+ @param headers: map
97
+ @param runtime: runtime options for this request RuntimeOptions
98
+ @return: CancelJobRunResponse
99
+ """
84
100
  UtilClient.validate_model(request)
85
101
  query = {}
86
102
  if not UtilClient.is_unset(request.region_id):
@@ -111,6 +127,12 @@ class Client(OpenApiClient):
111
127
  job_run_id: str,
112
128
  request: emr_serverless_spark_20230808_models.CancelJobRunRequest,
113
129
  ) -> emr_serverless_spark_20230808_models.CancelJobRunResponse:
130
+ """
131
+ @summary 取消jobRun作业
132
+
133
+ @param request: CancelJobRunRequest
134
+ @return: CancelJobRunResponse
135
+ """
114
136
  runtime = util_models.RuntimeOptions()
115
137
  headers = {}
116
138
  return self.cancel_job_run_with_options(workspace_id, job_run_id, request, headers, runtime)
@@ -121,6 +143,12 @@ class Client(OpenApiClient):
121
143
  job_run_id: str,
122
144
  request: emr_serverless_spark_20230808_models.CancelJobRunRequest,
123
145
  ) -> emr_serverless_spark_20230808_models.CancelJobRunResponse:
146
+ """
147
+ @summary 取消jobRun作业
148
+
149
+ @param request: CancelJobRunRequest
150
+ @return: CancelJobRunResponse
151
+ """
124
152
  runtime = util_models.RuntimeOptions()
125
153
  headers = {}
126
154
  return await self.cancel_job_run_with_options_async(workspace_id, job_run_id, request, headers, runtime)
@@ -133,6 +161,14 @@ class Client(OpenApiClient):
133
161
  headers: Dict[str, str],
134
162
  runtime: util_models.RuntimeOptions,
135
163
  ) -> emr_serverless_spark_20230808_models.GetJobRunResponse:
164
+ """
165
+ @summary 获取任务
166
+
167
+ @param request: GetJobRunRequest
168
+ @param headers: map
169
+ @param runtime: runtime options for this request RuntimeOptions
170
+ @return: GetJobRunResponse
171
+ """
136
172
  UtilClient.validate_model(request)
137
173
  query = {}
138
174
  if not UtilClient.is_unset(request.region_id):
@@ -165,6 +201,14 @@ class Client(OpenApiClient):
165
201
  headers: Dict[str, str],
166
202
  runtime: util_models.RuntimeOptions,
167
203
  ) -> emr_serverless_spark_20230808_models.GetJobRunResponse:
204
+ """
205
+ @summary 获取任务
206
+
207
+ @param request: GetJobRunRequest
208
+ @param headers: map
209
+ @param runtime: runtime options for this request RuntimeOptions
210
+ @return: GetJobRunResponse
211
+ """
168
212
  UtilClient.validate_model(request)
169
213
  query = {}
170
214
  if not UtilClient.is_unset(request.region_id):
@@ -195,6 +239,12 @@ class Client(OpenApiClient):
195
239
  job_run_id: str,
196
240
  request: emr_serverless_spark_20230808_models.GetJobRunRequest,
197
241
  ) -> emr_serverless_spark_20230808_models.GetJobRunResponse:
242
+ """
243
+ @summary 获取任务
244
+
245
+ @param request: GetJobRunRequest
246
+ @return: GetJobRunResponse
247
+ """
198
248
  runtime = util_models.RuntimeOptions()
199
249
  headers = {}
200
250
  return self.get_job_run_with_options(workspace_id, job_run_id, request, headers, runtime)
@@ -205,6 +255,12 @@ class Client(OpenApiClient):
205
255
  job_run_id: str,
206
256
  request: emr_serverless_spark_20230808_models.GetJobRunRequest,
207
257
  ) -> emr_serverless_spark_20230808_models.GetJobRunResponse:
258
+ """
259
+ @summary 获取任务
260
+
261
+ @param request: GetJobRunRequest
262
+ @return: GetJobRunResponse
263
+ """
208
264
  runtime = util_models.RuntimeOptions()
209
265
  headers = {}
210
266
  return await self.get_job_run_with_options_async(workspace_id, job_run_id, request, headers, runtime)
@@ -216,6 +272,14 @@ class Client(OpenApiClient):
216
272
  headers: Dict[str, str],
217
273
  runtime: util_models.RuntimeOptions,
218
274
  ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
275
+ """
276
+ @summary 查询run列表
277
+
278
+ @param tmp_req: ListJobRunsRequest
279
+ @param headers: map
280
+ @param runtime: runtime options for this request RuntimeOptions
281
+ @return: ListJobRunsResponse
282
+ """
219
283
  UtilClient.validate_model(tmp_req)
220
284
  request = emr_serverless_spark_20230808_models.ListJobRunsShrinkRequest()
221
285
  OpenApiUtilClient.convert(tmp_req, request)
@@ -277,6 +341,14 @@ class Client(OpenApiClient):
277
341
  headers: Dict[str, str],
278
342
  runtime: util_models.RuntimeOptions,
279
343
  ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
344
+ """
345
+ @summary 查询run列表
346
+
347
+ @param tmp_req: ListJobRunsRequest
348
+ @param headers: map
349
+ @param runtime: runtime options for this request RuntimeOptions
350
+ @return: ListJobRunsResponse
351
+ """
280
352
  UtilClient.validate_model(tmp_req)
281
353
  request = emr_serverless_spark_20230808_models.ListJobRunsShrinkRequest()
282
354
  OpenApiUtilClient.convert(tmp_req, request)
@@ -336,6 +408,12 @@ class Client(OpenApiClient):
336
408
  workspace_id: str,
337
409
  request: emr_serverless_spark_20230808_models.ListJobRunsRequest,
338
410
  ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
411
+ """
412
+ @summary 查询run列表
413
+
414
+ @param request: ListJobRunsRequest
415
+ @return: ListJobRunsResponse
416
+ """
339
417
  runtime = util_models.RuntimeOptions()
340
418
  headers = {}
341
419
  return self.list_job_runs_with_options(workspace_id, request, headers, runtime)
@@ -345,6 +423,12 @@ class Client(OpenApiClient):
345
423
  workspace_id: str,
346
424
  request: emr_serverless_spark_20230808_models.ListJobRunsRequest,
347
425
  ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
426
+ """
427
+ @summary 查询run列表
428
+
429
+ @param request: ListJobRunsRequest
430
+ @return: ListJobRunsResponse
431
+ """
348
432
  runtime = util_models.RuntimeOptions()
349
433
  headers = {}
350
434
  return await self.list_job_runs_with_options_async(workspace_id, request, headers, runtime)
@@ -356,6 +440,14 @@ class Client(OpenApiClient):
356
440
  headers: Dict[str, str],
357
441
  runtime: util_models.RuntimeOptions,
358
442
  ) -> emr_serverless_spark_20230808_models.StartJobRunResponse:
443
+ """
444
+ @summary 启动作业
445
+
446
+ @param request: StartJobRunRequest
447
+ @param headers: map
448
+ @param runtime: runtime options for this request RuntimeOptions
449
+ @return: StartJobRunResponse
450
+ """
359
451
  UtilClient.validate_model(request)
360
452
  query = {}
361
453
  if not UtilClient.is_unset(request.region_id):
@@ -409,6 +501,14 @@ class Client(OpenApiClient):
409
501
  headers: Dict[str, str],
410
502
  runtime: util_models.RuntimeOptions,
411
503
  ) -> emr_serverless_spark_20230808_models.StartJobRunResponse:
504
+ """
505
+ @summary 启动作业
506
+
507
+ @param request: StartJobRunRequest
508
+ @param headers: map
509
+ @param runtime: runtime options for this request RuntimeOptions
510
+ @return: StartJobRunResponse
511
+ """
412
512
  UtilClient.validate_model(request)
413
513
  query = {}
414
514
  if not UtilClient.is_unset(request.region_id):
@@ -460,6 +560,12 @@ class Client(OpenApiClient):
460
560
  workspace_id: str,
461
561
  request: emr_serverless_spark_20230808_models.StartJobRunRequest,
462
562
  ) -> emr_serverless_spark_20230808_models.StartJobRunResponse:
563
+ """
564
+ @summary 启动作业
565
+
566
+ @param request: StartJobRunRequest
567
+ @return: StartJobRunResponse
568
+ """
463
569
  runtime = util_models.RuntimeOptions()
464
570
  headers = {}
465
571
  return self.start_job_run_with_options(workspace_id, request, headers, runtime)
@@ -469,6 +575,12 @@ class Client(OpenApiClient):
469
575
  workspace_id: str,
470
576
  request: emr_serverless_spark_20230808_models.StartJobRunRequest,
471
577
  ) -> emr_serverless_spark_20230808_models.StartJobRunResponse:
578
+ """
579
+ @summary 启动作业
580
+
581
+ @param request: StartJobRunRequest
582
+ @return: StartJobRunResponse
583
+ """
472
584
  runtime = util_models.RuntimeOptions()
473
585
  headers = {}
474
586
  return await self.start_job_run_with_options_async(workspace_id, request, headers, runtime)
@@ -15,12 +15,19 @@ class Credential(TeaModel):
15
15
  security_token: str = None,
16
16
  signature: str = None,
17
17
  ):
18
+ # This parameter is required.
18
19
  self.access_id = access_id
20
+ # This parameter is required.
19
21
  self.dir = dir
22
+ # This parameter is required.
20
23
  self.expire = expire
24
+ # This parameter is required.
21
25
  self.host = host
26
+ # This parameter is required.
22
27
  self.policy = policy
28
+ # This parameter is required.
23
29
  self.security_token = security_token
30
+ # This parameter is required.
24
31
  self.signature = signature
25
32
 
26
33
  def validate(self):
@@ -79,13 +86,20 @@ class Artifact(TeaModel):
79
86
  modifier: int = None,
80
87
  name: str = None,
81
88
  ):
89
+ # This parameter is required.
82
90
  self.biz_id = biz_id
91
+ # This parameter is required.
83
92
  self.creator = creator
84
93
  self.credential = credential
94
+ # This parameter is required.
85
95
  self.gmt_created = gmt_created
96
+ # This parameter is required.
86
97
  self.gmt_modified = gmt_modified
98
+ # This parameter is required.
87
99
  self.location = location
100
+ # This parameter is required.
88
101
  self.modifier = modifier
102
+ # This parameter is required.
89
103
  self.name = name
90
104
 
91
105
  def validate(self):
@@ -150,13 +164,20 @@ class Category(TeaModel):
150
164
  parent_biz_id: str = None,
151
165
  type: str = None,
152
166
  ):
167
+ # This parameter is required.
153
168
  self.biz_id = biz_id
169
+ # This parameter is required.
154
170
  self.creator = creator
171
+ # This parameter is required.
155
172
  self.gmt_created = gmt_created
173
+ # This parameter is required.
156
174
  self.gmt_modified = gmt_modified
175
+ # This parameter is required.
157
176
  self.modifier = modifier
177
+ # This parameter is required.
158
178
  self.name = name
159
179
  self.parent_biz_id = parent_biz_id
180
+ # This parameter is required.
160
181
  self.type = type
161
182
 
162
183
  def validate(self):
@@ -246,6 +267,80 @@ class Configuration(TeaModel):
246
267
  return self
247
268
 
248
269
 
270
+ class ConfigurationOverridesConfigurations(TeaModel):
271
+ def __init__(
272
+ self,
273
+ config_file_name: str = None,
274
+ config_item_key: str = None,
275
+ config_item_value: str = None,
276
+ ):
277
+ self.config_file_name = config_file_name
278
+ self.config_item_key = config_item_key
279
+ self.config_item_value = config_item_value
280
+
281
+ def validate(self):
282
+ pass
283
+
284
+ def to_map(self):
285
+ _map = super().to_map()
286
+ if _map is not None:
287
+ return _map
288
+
289
+ result = dict()
290
+ if self.config_file_name is not None:
291
+ result['configFileName'] = self.config_file_name
292
+ if self.config_item_key is not None:
293
+ result['configItemKey'] = self.config_item_key
294
+ if self.config_item_value is not None:
295
+ result['configItemValue'] = self.config_item_value
296
+ return result
297
+
298
+ def from_map(self, m: dict = None):
299
+ m = m or dict()
300
+ if m.get('configFileName') is not None:
301
+ self.config_file_name = m.get('configFileName')
302
+ if m.get('configItemKey') is not None:
303
+ self.config_item_key = m.get('configItemKey')
304
+ if m.get('configItemValue') is not None:
305
+ self.config_item_value = m.get('configItemValue')
306
+ return self
307
+
308
+
309
+ class ConfigurationOverrides(TeaModel):
310
+ def __init__(
311
+ self,
312
+ configurations: List[ConfigurationOverridesConfigurations] = None,
313
+ ):
314
+ self.configurations = configurations
315
+
316
+ def validate(self):
317
+ if self.configurations:
318
+ for k in self.configurations:
319
+ if k:
320
+ k.validate()
321
+
322
+ def to_map(self):
323
+ _map = super().to_map()
324
+ if _map is not None:
325
+ return _map
326
+
327
+ result = dict()
328
+ result['configurations'] = []
329
+ if self.configurations is not None:
330
+ for k in self.configurations:
331
+ result['configurations'].append(k.to_map() if k else None)
332
+ return result
333
+
334
+ def from_map(self, m: dict = None):
335
+ m = m or dict()
336
+ self.configurations = []
337
+ if m.get('configurations') is not None:
338
+ for k in m.get('configurations'):
339
+ temp_model = ConfigurationOverridesConfigurations()
340
+ self.configurations.append(temp_model.from_map(k))
341
+ return self
342
+
343
+
249
344
  class JobDriverSparkSubmit(TeaModel):
250
345
  def __init__(
251
346
  self,
@@ -389,11 +484,15 @@ class ReleaseVersionImage(TeaModel):
389
484
  class RunLog(TeaModel):
390
485
  def __init__(
391
486
  self,
487
+ driver_startup: str = None,
392
488
  driver_std_error: str = None,
393
489
  driver_std_out: str = None,
490
+ driver_syslog: str = None,
394
491
  ):
492
+ self.driver_startup = driver_startup
395
493
  self.driver_std_error = driver_std_error
396
494
  self.driver_std_out = driver_std_out
495
+ self.driver_syslog = driver_syslog
397
496
 
398
497
  def validate(self):
399
498
  pass
@@ -404,18 +503,26 @@ class RunLog(TeaModel):
404
503
  return _map
405
504
 
406
505
  result = dict()
506
+ if self.driver_startup is not None:
507
+ result['driverStartup'] = self.driver_startup
407
508
  if self.driver_std_error is not None:
408
509
  result['driverStdError'] = self.driver_std_error
409
510
  if self.driver_std_out is not None:
410
511
  result['driverStdOut'] = self.driver_std_out
512
+ if self.driver_syslog is not None:
513
+ result['driverSyslog'] = self.driver_syslog
411
514
  return result
412
515
 
413
516
  def from_map(self, m: dict = None):
414
517
  m = m or dict()
518
+ if m.get('driverStartup') is not None:
519
+ self.driver_startup = m.get('driverStartup')
415
520
  if m.get('driverStdError') is not None:
416
521
  self.driver_std_error = m.get('driverStdError')
417
522
  if m.get('driverStdOut') is not None:
418
523
  self.driver_std_out = m.get('driverStdOut')
524
+ if m.get('driverSyslog') is not None:
525
+ self.driver_syslog = m.get('driverSyslog')
419
526
  return self
420
527
 
421
528
 
@@ -425,7 +532,9 @@ class SparkConf(TeaModel):
425
532
  key: str = None,
426
533
  value: str = None,
427
534
  ):
535
+ # This parameter is required.
428
536
  self.key = key
537
+ # This parameter is required.
429
538
  self.value = value
430
539
 
431
540
  def validate(self):
@@ -635,6 +744,7 @@ class Tag(TeaModel):
635
744
  class Task(TeaModel):
636
745
  def __init__(
637
746
  self,
747
+ archives: List[str] = None,
638
748
  artifact_url: str = None,
639
749
  biz_id: str = None,
640
750
  category_biz_id: str = None,
@@ -645,10 +755,13 @@ class Task(TeaModel):
645
755
  default_resource_queue_id: str = None,
646
756
  default_sql_compute_id: str = None,
647
757
  extra_artifact_ids: List[str] = None,
758
+ extra_spark_submit_params: str = None,
759
+ files: List[str] = None,
648
760
  gmt_created: str = None,
649
761
  gmt_modified: str = None,
650
762
  has_changed: bool = None,
651
763
  has_commited: bool = None,
764
+ jars: List[str] = None,
652
765
  last_run_resource_queue_id: str = None,
653
766
  modifier: int = None,
654
767
  name: str = None,
@@ -666,35 +779,54 @@ class Task(TeaModel):
666
779
  tags: Dict[str, str] = None,
667
780
  type: str = None,
668
781
  ):
782
+ self.archives = archives
669
783
  self.artifact_url = artifact_url
784
+ # This parameter is required.
670
785
  self.biz_id = biz_id
671
786
  self.category_biz_id = category_biz_id
672
787
  self.content = content
788
+ # This parameter is required.
673
789
  self.creator = creator
674
790
  self.default_catalog_id = default_catalog_id
675
791
  self.default_database = default_database
676
792
  self.default_resource_queue_id = default_resource_queue_id
677
793
  self.default_sql_compute_id = default_sql_compute_id
678
794
  self.extra_artifact_ids = extra_artifact_ids
795
+ self.extra_spark_submit_params = extra_spark_submit_params
796
+ self.files = files
797
+ # This parameter is required.
679
798
  self.gmt_created = gmt_created
799
+ # This parameter is required.
680
800
  self.gmt_modified = gmt_modified
681
801
  self.has_changed = has_changed
802
+ # This parameter is required.
682
803
  self.has_commited = has_commited
804
+ self.jars = jars
683
805
  self.last_run_resource_queue_id = last_run_resource_queue_id
806
+ # This parameter is required.
684
807
  self.modifier = modifier
808
+ # This parameter is required.
685
809
  self.name = name
686
810
  self.py_files = py_files
687
811
  self.spark_args = spark_args
688
812
  self.spark_conf = spark_conf
813
+ # This parameter is required.
689
814
  self.spark_driver_cores = spark_driver_cores
815
+ # This parameter is required.
690
816
  self.spark_driver_memory = spark_driver_memory
691
817
  self.spark_entrypoint = spark_entrypoint
818
+ # This parameter is required.
692
819
  self.spark_executor_cores = spark_executor_cores
820
+ # This parameter is required.
693
821
  self.spark_executor_memory = spark_executor_memory
822
+ # This parameter is required.
694
823
  self.spark_log_level = spark_log_level
824
+ # This parameter is required.
695
825
  self.spark_log_path = spark_log_path
826
+ # This parameter is required.
696
827
  self.spark_version = spark_version
697
828
  self.tags = tags
829
+ # This parameter is required.
698
830
  self.type = type
699
831
 
700
832
  def validate(self):
@@ -709,6 +841,8 @@ class Task(TeaModel):
709
841
  return _map
710
842
 
711
843
  result = dict()
844
+ if self.archives is not None:
845
+ result['archives'] = self.archives
712
846
  if self.artifact_url is not None:
713
847
  result['artifactUrl'] = self.artifact_url
714
848
  if self.biz_id is not None:
@@ -729,6 +863,10 @@ class Task(TeaModel):
729
863
  result['defaultSqlComputeId'] = self.default_sql_compute_id
730
864
  if self.extra_artifact_ids is not None:
731
865
  result['extraArtifactIds'] = self.extra_artifact_ids
866
+ if self.extra_spark_submit_params is not None:
867
+ result['extraSparkSubmitParams'] = self.extra_spark_submit_params
868
+ if self.files is not None:
869
+ result['files'] = self.files
732
870
  if self.gmt_created is not None:
733
871
  result['gmtCreated'] = self.gmt_created
734
872
  if self.gmt_modified is not None:
@@ -737,6 +875,8 @@ class Task(TeaModel):
737
875
  result['hasChanged'] = self.has_changed
738
876
  if self.has_commited is not None:
739
877
  result['hasCommited'] = self.has_commited
878
+ if self.jars is not None:
879
+ result['jars'] = self.jars
740
880
  if self.last_run_resource_queue_id is not None:
741
881
  result['lastRunResourceQueueId'] = self.last_run_resource_queue_id
742
882
  if self.modifier is not None:
@@ -775,6 +915,8 @@ class Task(TeaModel):
775
915
 
776
916
  def from_map(self, m: dict = None):
777
917
  m = m or dict()
918
+ if m.get('archives') is not None:
919
+ self.archives = m.get('archives')
778
920
  if m.get('artifactUrl') is not None:
779
921
  self.artifact_url = m.get('artifactUrl')
780
922
  if m.get('bizId') is not None:
@@ -795,6 +937,10 @@ class Task(TeaModel):
795
937
  self.default_sql_compute_id = m.get('defaultSqlComputeId')
796
938
  if m.get('extraArtifactIds') is not None:
797
939
  self.extra_artifact_ids = m.get('extraArtifactIds')
940
+ if m.get('extraSparkSubmitParams') is not None:
941
+ self.extra_spark_submit_params = m.get('extraSparkSubmitParams')
942
+ if m.get('files') is not None:
943
+ self.files = m.get('files')
798
944
  if m.get('gmtCreated') is not None:
799
945
  self.gmt_created = m.get('gmtCreated')
800
946
  if m.get('gmtModified') is not None:
@@ -803,6 +949,8 @@ class Task(TeaModel):
803
949
  self.has_changed = m.get('hasChanged')
804
950
  if m.get('hasCommited') is not None:
805
951
  self.has_commited = m.get('hasCommited')
952
+ if m.get('jars') is not None:
953
+ self.jars = m.get('jars')
806
954
  if m.get('lastRunResourceQueueId') is not None:
807
955
  self.last_run_resource_queue_id = m.get('lastRunResourceQueueId')
808
956
  if m.get('modifier') is not None:
@@ -994,17 +1142,28 @@ class Template(TeaModel):
994
1142
  spark_version: str = None,
995
1143
  template_type: str = None,
996
1144
  ):
1145
+ # This parameter is required.
997
1146
  self.creator = creator
1147
+ # This parameter is required.
998
1148
  self.gmt_created = gmt_created
1149
+ # This parameter is required.
999
1150
  self.gmt_modified = gmt_modified
1151
+ # This parameter is required.
1000
1152
  self.modifier = modifier
1001
1153
  self.spark_conf = spark_conf
1154
+ # This parameter is required.
1002
1155
  self.spark_driver_cores = spark_driver_cores
1156
+ # This parameter is required.
1003
1157
  self.spark_driver_memory = spark_driver_memory
1158
+ # This parameter is required.
1004
1159
  self.spark_executor_cores = spark_executor_cores
1160
+ # This parameter is required.
1005
1161
  self.spark_executor_memory = spark_executor_memory
1162
+ # This parameter is required.
1006
1163
  self.spark_log_level = spark_log_level
1164
+ # This parameter is required.
1007
1165
  self.spark_log_path = spark_log_path
1166
+ # This parameter is required.
1008
1167
  self.spark_version = spark_version
1009
1168
  self.template_type = template_type
1010
1169
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: alibabacloud-emr-serverless-spark20230808
3
- Version: 1.0.0
3
+ Version: 1.0.1
4
4
  Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
5
5
  Home-page: https://github.com/aliyun/alibabacloud-python-sdk
6
6
  Author: Alibaba Cloud SDK
@@ -1,4 +1,4 @@
1
- alibabacloud_tea_util<1.0.0,>=0.3.11
2
- alibabacloud_tea_openapi<1.0.0,>=0.3.8
1
+ alibabacloud_tea_util<1.0.0,>=0.3.12
2
+ alibabacloud_tea_openapi<1.0.0,>=0.3.9
3
3
  alibabacloud_openapi_util<1.0.0,>=0.2.1
4
4
  alibabacloud_endpoint_util<1.0.0,>=0.0.3
@@ -24,7 +24,7 @@ from setuptools import setup, find_packages
24
24
  """
25
25
  setup module for alibabacloud_emr-serverless-spark20230808.
26
26
 
27
- Created on 16/04/2024
27
+ Created on 17/05/2024
28
28
 
29
29
  @author: Alibaba Cloud SDK
30
30
  """
@@ -37,8 +37,8 @@ AUTHOR_EMAIL = "sdk-team@alibabacloud.com"
37
37
  URL = "https://github.com/aliyun/alibabacloud-python-sdk"
38
38
  VERSION = __import__(PACKAGE).__version__
39
39
  REQUIRES = [
40
- "alibabacloud_tea_util>=0.3.11, <1.0.0",
41
- "alibabacloud_tea_openapi>=0.3.8, <1.0.0",
40
+ "alibabacloud_tea_util>=0.3.12, <1.0.0",
41
+ "alibabacloud_tea_openapi>=0.3.9, <1.0.0",
42
42
  "alibabacloud_openapi_util>=0.2.1, <1.0.0",
43
43
  "alibabacloud_endpoint_util>=0.0.3, <1.0.0"
44
44
  ]