alibabacloud-fc20230330 4.2.7__py3-none-any.whl → 4.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibabacloud_fc20230330/__init__.py +1 -1
- alibabacloud_fc20230330/client.py +536 -0
- alibabacloud_fc20230330/models.py +1563 -54
- {alibabacloud_fc20230330-4.2.7.dist-info → alibabacloud_fc20230330-4.5.0.dist-info}/METADATA +2 -2
- alibabacloud_fc20230330-4.5.0.dist-info/RECORD +8 -0
- alibabacloud_fc20230330-4.2.7.dist-info/RECORD +0 -8
- {alibabacloud_fc20230330-4.2.7.dist-info → alibabacloud_fc20230330-4.5.0.dist-info}/LICENSE +0 -0
- {alibabacloud_fc20230330-4.2.7.dist-info → alibabacloud_fc20230330-4.5.0.dist-info}/WHEEL +0 -0
- {alibabacloud_fc20230330-4.2.7.dist-info → alibabacloud_fc20230330-4.5.0.dist-info}/top_level.txt +0 -0
@@ -646,6 +646,45 @@ class ConcurrencyConfig(TeaModel):
|
|
646
646
|
return self
|
647
647
|
|
648
648
|
|
649
|
+
class CookieSessionAffinityConfig(TeaModel):
|
650
|
+
def __init__(
|
651
|
+
self,
|
652
|
+
session_concurrency_per_instance: int = None,
|
653
|
+
session_idle_timeout_in_seconds: int = None,
|
654
|
+
session_ttlin_seconds: int = None,
|
655
|
+
):
|
656
|
+
self.session_concurrency_per_instance = session_concurrency_per_instance
|
657
|
+
self.session_idle_timeout_in_seconds = session_idle_timeout_in_seconds
|
658
|
+
self.session_ttlin_seconds = session_ttlin_seconds
|
659
|
+
|
660
|
+
def validate(self):
|
661
|
+
pass
|
662
|
+
|
663
|
+
def to_map(self):
|
664
|
+
_map = super().to_map()
|
665
|
+
if _map is not None:
|
666
|
+
return _map
|
667
|
+
|
668
|
+
result = dict()
|
669
|
+
if self.session_concurrency_per_instance is not None:
|
670
|
+
result['sessionConcurrencyPerInstance'] = self.session_concurrency_per_instance
|
671
|
+
if self.session_idle_timeout_in_seconds is not None:
|
672
|
+
result['sessionIdleTimeoutInSeconds'] = self.session_idle_timeout_in_seconds
|
673
|
+
if self.session_ttlin_seconds is not None:
|
674
|
+
result['sessionTTLInSeconds'] = self.session_ttlin_seconds
|
675
|
+
return result
|
676
|
+
|
677
|
+
def from_map(self, m: dict = None):
|
678
|
+
m = m or dict()
|
679
|
+
if m.get('sessionConcurrencyPerInstance') is not None:
|
680
|
+
self.session_concurrency_per_instance = m.get('sessionConcurrencyPerInstance')
|
681
|
+
if m.get('sessionIdleTimeoutInSeconds') is not None:
|
682
|
+
self.session_idle_timeout_in_seconds = m.get('sessionIdleTimeoutInSeconds')
|
683
|
+
if m.get('sessionTTLInSeconds') is not None:
|
684
|
+
self.session_ttlin_seconds = m.get('sessionTTLInSeconds')
|
685
|
+
return self
|
686
|
+
|
687
|
+
|
649
688
|
class CreateAliasInput(TeaModel):
|
650
689
|
def __init__(
|
651
690
|
self,
|
@@ -2030,6 +2069,7 @@ class CreateFunctionInput(TeaModel):
|
|
2030
2069
|
gpu_config: GPUConfig = None,
|
2031
2070
|
handler: str = None,
|
2032
2071
|
instance_concurrency: int = None,
|
2072
|
+
instance_isolation_mode: str = None,
|
2033
2073
|
instance_lifecycle_config: InstanceLifecycleConfig = None,
|
2034
2074
|
internet_access: bool = None,
|
2035
2075
|
layers: List[str] = None,
|
@@ -2041,6 +2081,7 @@ class CreateFunctionInput(TeaModel):
|
|
2041
2081
|
role: str = None,
|
2042
2082
|
runtime: str = None,
|
2043
2083
|
session_affinity: str = None,
|
2084
|
+
session_affinity_config: str = None,
|
2044
2085
|
tags: List[Tag] = None,
|
2045
2086
|
timeout: int = None,
|
2046
2087
|
tracing_config: TracingConfig = None,
|
@@ -2062,6 +2103,7 @@ class CreateFunctionInput(TeaModel):
|
|
2062
2103
|
# This parameter is required.
|
2063
2104
|
self.handler = handler
|
2064
2105
|
self.instance_concurrency = instance_concurrency
|
2106
|
+
self.instance_isolation_mode = instance_isolation_mode
|
2065
2107
|
self.instance_lifecycle_config = instance_lifecycle_config
|
2066
2108
|
self.internet_access = internet_access
|
2067
2109
|
self.layers = layers
|
@@ -2074,6 +2116,7 @@ class CreateFunctionInput(TeaModel):
|
|
2074
2116
|
# This parameter is required.
|
2075
2117
|
self.runtime = runtime
|
2076
2118
|
self.session_affinity = session_affinity
|
2119
|
+
self.session_affinity_config = session_affinity_config
|
2077
2120
|
self.tags = tags
|
2078
2121
|
self.timeout = timeout
|
2079
2122
|
self.tracing_config = tracing_config
|
@@ -2141,6 +2184,8 @@ class CreateFunctionInput(TeaModel):
|
|
2141
2184
|
result['handler'] = self.handler
|
2142
2185
|
if self.instance_concurrency is not None:
|
2143
2186
|
result['instanceConcurrency'] = self.instance_concurrency
|
2187
|
+
if self.instance_isolation_mode is not None:
|
2188
|
+
result['instanceIsolationMode'] = self.instance_isolation_mode
|
2144
2189
|
if self.instance_lifecycle_config is not None:
|
2145
2190
|
result['instanceLifecycleConfig'] = self.instance_lifecycle_config.to_map()
|
2146
2191
|
if self.internet_access is not None:
|
@@ -2163,6 +2208,8 @@ class CreateFunctionInput(TeaModel):
|
|
2163
2208
|
result['runtime'] = self.runtime
|
2164
2209
|
if self.session_affinity is not None:
|
2165
2210
|
result['sessionAffinity'] = self.session_affinity
|
2211
|
+
if self.session_affinity_config is not None:
|
2212
|
+
result['sessionAffinityConfig'] = self.session_affinity_config
|
2166
2213
|
result['tags'] = []
|
2167
2214
|
if self.tags is not None:
|
2168
2215
|
for k in self.tags:
|
@@ -2210,6 +2257,8 @@ class CreateFunctionInput(TeaModel):
|
|
2210
2257
|
self.handler = m.get('handler')
|
2211
2258
|
if m.get('instanceConcurrency') is not None:
|
2212
2259
|
self.instance_concurrency = m.get('instanceConcurrency')
|
2260
|
+
if m.get('instanceIsolationMode') is not None:
|
2261
|
+
self.instance_isolation_mode = m.get('instanceIsolationMode')
|
2213
2262
|
if m.get('instanceLifecycleConfig') is not None:
|
2214
2263
|
temp_model = InstanceLifecycleConfig()
|
2215
2264
|
self.instance_lifecycle_config = temp_model.from_map(m['instanceLifecycleConfig'])
|
@@ -2236,6 +2285,8 @@ class CreateFunctionInput(TeaModel):
|
|
2236
2285
|
self.runtime = m.get('runtime')
|
2237
2286
|
if m.get('sessionAffinity') is not None:
|
2238
2287
|
self.session_affinity = m.get('sessionAffinity')
|
2288
|
+
if m.get('sessionAffinityConfig') is not None:
|
2289
|
+
self.session_affinity_config = m.get('sessionAffinityConfig')
|
2239
2290
|
self.tags = []
|
2240
2291
|
if m.get('tags') is not None:
|
2241
2292
|
for k in m.get('tags'):
|
@@ -2657,6 +2708,218 @@ class DescribeRegionsOutput(TeaModel):
|
|
2657
2708
|
return self
|
2658
2709
|
|
2659
2710
|
|
2711
|
+
class ScalingPolicy(TeaModel):
|
2712
|
+
def __init__(
|
2713
|
+
self,
|
2714
|
+
end_time: str = None,
|
2715
|
+
max_instances: int = None,
|
2716
|
+
metric_target: float = None,
|
2717
|
+
metric_type: str = None,
|
2718
|
+
min_instances: int = None,
|
2719
|
+
name: str = None,
|
2720
|
+
start_time: str = None,
|
2721
|
+
time_zone: str = None,
|
2722
|
+
):
|
2723
|
+
self.end_time = end_time
|
2724
|
+
self.max_instances = max_instances
|
2725
|
+
self.metric_target = metric_target
|
2726
|
+
self.metric_type = metric_type
|
2727
|
+
self.min_instances = min_instances
|
2728
|
+
self.name = name
|
2729
|
+
self.start_time = start_time
|
2730
|
+
self.time_zone = time_zone
|
2731
|
+
|
2732
|
+
def validate(self):
|
2733
|
+
pass
|
2734
|
+
|
2735
|
+
def to_map(self):
|
2736
|
+
_map = super().to_map()
|
2737
|
+
if _map is not None:
|
2738
|
+
return _map
|
2739
|
+
|
2740
|
+
result = dict()
|
2741
|
+
if self.end_time is not None:
|
2742
|
+
result['endTime'] = self.end_time
|
2743
|
+
if self.max_instances is not None:
|
2744
|
+
result['maxInstances'] = self.max_instances
|
2745
|
+
if self.metric_target is not None:
|
2746
|
+
result['metricTarget'] = self.metric_target
|
2747
|
+
if self.metric_type is not None:
|
2748
|
+
result['metricType'] = self.metric_type
|
2749
|
+
if self.min_instances is not None:
|
2750
|
+
result['minInstances'] = self.min_instances
|
2751
|
+
if self.name is not None:
|
2752
|
+
result['name'] = self.name
|
2753
|
+
if self.start_time is not None:
|
2754
|
+
result['startTime'] = self.start_time
|
2755
|
+
if self.time_zone is not None:
|
2756
|
+
result['timeZone'] = self.time_zone
|
2757
|
+
return result
|
2758
|
+
|
2759
|
+
def from_map(self, m: dict = None):
|
2760
|
+
m = m or dict()
|
2761
|
+
if m.get('endTime') is not None:
|
2762
|
+
self.end_time = m.get('endTime')
|
2763
|
+
if m.get('maxInstances') is not None:
|
2764
|
+
self.max_instances = m.get('maxInstances')
|
2765
|
+
if m.get('metricTarget') is not None:
|
2766
|
+
self.metric_target = m.get('metricTarget')
|
2767
|
+
if m.get('metricType') is not None:
|
2768
|
+
self.metric_type = m.get('metricType')
|
2769
|
+
if m.get('minInstances') is not None:
|
2770
|
+
self.min_instances = m.get('minInstances')
|
2771
|
+
if m.get('name') is not None:
|
2772
|
+
self.name = m.get('name')
|
2773
|
+
if m.get('startTime') is not None:
|
2774
|
+
self.start_time = m.get('startTime')
|
2775
|
+
if m.get('timeZone') is not None:
|
2776
|
+
self.time_zone = m.get('timeZone')
|
2777
|
+
return self
|
2778
|
+
|
2779
|
+
|
2780
|
+
class ScheduledPolicy(TeaModel):
|
2781
|
+
def __init__(
|
2782
|
+
self,
|
2783
|
+
end_time: str = None,
|
2784
|
+
name: str = None,
|
2785
|
+
schedule_expression: str = None,
|
2786
|
+
start_time: str = None,
|
2787
|
+
target: int = None,
|
2788
|
+
time_zone: str = None,
|
2789
|
+
):
|
2790
|
+
self.end_time = end_time
|
2791
|
+
self.name = name
|
2792
|
+
self.schedule_expression = schedule_expression
|
2793
|
+
self.start_time = start_time
|
2794
|
+
self.target = target
|
2795
|
+
self.time_zone = time_zone
|
2796
|
+
|
2797
|
+
def validate(self):
|
2798
|
+
pass
|
2799
|
+
|
2800
|
+
def to_map(self):
|
2801
|
+
_map = super().to_map()
|
2802
|
+
if _map is not None:
|
2803
|
+
return _map
|
2804
|
+
|
2805
|
+
result = dict()
|
2806
|
+
if self.end_time is not None:
|
2807
|
+
result['endTime'] = self.end_time
|
2808
|
+
if self.name is not None:
|
2809
|
+
result['name'] = self.name
|
2810
|
+
if self.schedule_expression is not None:
|
2811
|
+
result['scheduleExpression'] = self.schedule_expression
|
2812
|
+
if self.start_time is not None:
|
2813
|
+
result['startTime'] = self.start_time
|
2814
|
+
if self.target is not None:
|
2815
|
+
result['target'] = self.target
|
2816
|
+
if self.time_zone is not None:
|
2817
|
+
result['timeZone'] = self.time_zone
|
2818
|
+
return result
|
2819
|
+
|
2820
|
+
def from_map(self, m: dict = None):
|
2821
|
+
m = m or dict()
|
2822
|
+
if m.get('endTime') is not None:
|
2823
|
+
self.end_time = m.get('endTime')
|
2824
|
+
if m.get('name') is not None:
|
2825
|
+
self.name = m.get('name')
|
2826
|
+
if m.get('scheduleExpression') is not None:
|
2827
|
+
self.schedule_expression = m.get('scheduleExpression')
|
2828
|
+
if m.get('startTime') is not None:
|
2829
|
+
self.start_time = m.get('startTime')
|
2830
|
+
if m.get('target') is not None:
|
2831
|
+
self.target = m.get('target')
|
2832
|
+
if m.get('timeZone') is not None:
|
2833
|
+
self.time_zone = m.get('timeZone')
|
2834
|
+
return self
|
2835
|
+
|
2836
|
+
|
2837
|
+
class ElasticConfigStatus(TeaModel):
|
2838
|
+
def __init__(
|
2839
|
+
self,
|
2840
|
+
current_error: str = None,
|
2841
|
+
current_instances: int = None,
|
2842
|
+
function_arn: str = None,
|
2843
|
+
min_instances: int = None,
|
2844
|
+
resident_pool_id: str = None,
|
2845
|
+
scaling_policies: List[ScalingPolicy] = None,
|
2846
|
+
scheduled_policies: List[ScheduledPolicy] = None,
|
2847
|
+
target_instances: int = None,
|
2848
|
+
):
|
2849
|
+
self.current_error = current_error
|
2850
|
+
self.current_instances = current_instances
|
2851
|
+
self.function_arn = function_arn
|
2852
|
+
self.min_instances = min_instances
|
2853
|
+
self.resident_pool_id = resident_pool_id
|
2854
|
+
self.scaling_policies = scaling_policies
|
2855
|
+
self.scheduled_policies = scheduled_policies
|
2856
|
+
self.target_instances = target_instances
|
2857
|
+
|
2858
|
+
def validate(self):
|
2859
|
+
if self.scaling_policies:
|
2860
|
+
for k in self.scaling_policies:
|
2861
|
+
if k:
|
2862
|
+
k.validate()
|
2863
|
+
if self.scheduled_policies:
|
2864
|
+
for k in self.scheduled_policies:
|
2865
|
+
if k:
|
2866
|
+
k.validate()
|
2867
|
+
|
2868
|
+
def to_map(self):
|
2869
|
+
_map = super().to_map()
|
2870
|
+
if _map is not None:
|
2871
|
+
return _map
|
2872
|
+
|
2873
|
+
result = dict()
|
2874
|
+
if self.current_error is not None:
|
2875
|
+
result['currentError'] = self.current_error
|
2876
|
+
if self.current_instances is not None:
|
2877
|
+
result['currentInstances'] = self.current_instances
|
2878
|
+
if self.function_arn is not None:
|
2879
|
+
result['functionArn'] = self.function_arn
|
2880
|
+
if self.min_instances is not None:
|
2881
|
+
result['minInstances'] = self.min_instances
|
2882
|
+
if self.resident_pool_id is not None:
|
2883
|
+
result['residentPoolId'] = self.resident_pool_id
|
2884
|
+
result['scalingPolicies'] = []
|
2885
|
+
if self.scaling_policies is not None:
|
2886
|
+
for k in self.scaling_policies:
|
2887
|
+
result['scalingPolicies'].append(k.to_map() if k else None)
|
2888
|
+
result['scheduledPolicies'] = []
|
2889
|
+
if self.scheduled_policies is not None:
|
2890
|
+
for k in self.scheduled_policies:
|
2891
|
+
result['scheduledPolicies'].append(k.to_map() if k else None)
|
2892
|
+
if self.target_instances is not None:
|
2893
|
+
result['targetInstances'] = self.target_instances
|
2894
|
+
return result
|
2895
|
+
|
2896
|
+
def from_map(self, m: dict = None):
|
2897
|
+
m = m or dict()
|
2898
|
+
if m.get('currentError') is not None:
|
2899
|
+
self.current_error = m.get('currentError')
|
2900
|
+
if m.get('currentInstances') is not None:
|
2901
|
+
self.current_instances = m.get('currentInstances')
|
2902
|
+
if m.get('functionArn') is not None:
|
2903
|
+
self.function_arn = m.get('functionArn')
|
2904
|
+
if m.get('minInstances') is not None:
|
2905
|
+
self.min_instances = m.get('minInstances')
|
2906
|
+
if m.get('residentPoolId') is not None:
|
2907
|
+
self.resident_pool_id = m.get('residentPoolId')
|
2908
|
+
self.scaling_policies = []
|
2909
|
+
if m.get('scalingPolicies') is not None:
|
2910
|
+
for k in m.get('scalingPolicies'):
|
2911
|
+
temp_model = ScalingPolicy()
|
2912
|
+
self.scaling_policies.append(temp_model.from_map(k))
|
2913
|
+
self.scheduled_policies = []
|
2914
|
+
if m.get('scheduledPolicies') is not None:
|
2915
|
+
for k in m.get('scheduledPolicies'):
|
2916
|
+
temp_model = ScheduledPolicy()
|
2917
|
+
self.scheduled_policies.append(temp_model.from_map(k))
|
2918
|
+
if m.get('targetInstances') is not None:
|
2919
|
+
self.target_instances = m.get('targetInstances')
|
2920
|
+
return self
|
2921
|
+
|
2922
|
+
|
2660
2923
|
class Error(TeaModel):
|
2661
2924
|
def __init__(
|
2662
2925
|
self,
|
@@ -3530,6 +3793,7 @@ class Function(TeaModel):
|
|
3530
3793
|
gpu_config: GPUConfig = None,
|
3531
3794
|
handler: str = None,
|
3532
3795
|
instance_concurrency: int = None,
|
3796
|
+
instance_isolation_mode: str = None,
|
3533
3797
|
instance_lifecycle_config: InstanceLifecycleConfig = None,
|
3534
3798
|
internet_access: bool = None,
|
3535
3799
|
invocation_restriction: FunctionRestriction = None,
|
@@ -3546,6 +3810,7 @@ class Function(TeaModel):
|
|
3546
3810
|
role: str = None,
|
3547
3811
|
runtime: str = None,
|
3548
3812
|
session_affinity: str = None,
|
3813
|
+
session_affinity_config: str = None,
|
3549
3814
|
state: str = None,
|
3550
3815
|
state_reason: str = None,
|
3551
3816
|
state_reason_code: str = None,
|
@@ -3572,6 +3837,7 @@ class Function(TeaModel):
|
|
3572
3837
|
self.gpu_config = gpu_config
|
3573
3838
|
self.handler = handler
|
3574
3839
|
self.instance_concurrency = instance_concurrency
|
3840
|
+
self.instance_isolation_mode = instance_isolation_mode
|
3575
3841
|
self.instance_lifecycle_config = instance_lifecycle_config
|
3576
3842
|
self.internet_access = internet_access
|
3577
3843
|
self.invocation_restriction = invocation_restriction
|
@@ -3588,6 +3854,7 @@ class Function(TeaModel):
|
|
3588
3854
|
self.role = role
|
3589
3855
|
self.runtime = runtime
|
3590
3856
|
self.session_affinity = session_affinity
|
3857
|
+
self.session_affinity_config = session_affinity_config
|
3591
3858
|
self.state = state
|
3592
3859
|
self.state_reason = state_reason
|
3593
3860
|
self.state_reason_code = state_reason_code
|
@@ -3670,6 +3937,8 @@ class Function(TeaModel):
|
|
3670
3937
|
result['handler'] = self.handler
|
3671
3938
|
if self.instance_concurrency is not None:
|
3672
3939
|
result['instanceConcurrency'] = self.instance_concurrency
|
3940
|
+
if self.instance_isolation_mode is not None:
|
3941
|
+
result['instanceIsolationMode'] = self.instance_isolation_mode
|
3673
3942
|
if self.instance_lifecycle_config is not None:
|
3674
3943
|
result['instanceLifecycleConfig'] = self.instance_lifecycle_config.to_map()
|
3675
3944
|
if self.internet_access is not None:
|
@@ -3704,6 +3973,8 @@ class Function(TeaModel):
|
|
3704
3973
|
result['runtime'] = self.runtime
|
3705
3974
|
if self.session_affinity is not None:
|
3706
3975
|
result['sessionAffinity'] = self.session_affinity
|
3976
|
+
if self.session_affinity_config is not None:
|
3977
|
+
result['sessionAffinityConfig'] = self.session_affinity_config
|
3707
3978
|
if self.state is not None:
|
3708
3979
|
result['state'] = self.state
|
3709
3980
|
if self.state_reason is not None:
|
@@ -3764,6 +4035,8 @@ class Function(TeaModel):
|
|
3764
4035
|
self.handler = m.get('handler')
|
3765
4036
|
if m.get('instanceConcurrency') is not None:
|
3766
4037
|
self.instance_concurrency = m.get('instanceConcurrency')
|
4038
|
+
if m.get('instanceIsolationMode') is not None:
|
4039
|
+
self.instance_isolation_mode = m.get('instanceIsolationMode')
|
3767
4040
|
if m.get('instanceLifecycleConfig') is not None:
|
3768
4041
|
temp_model = InstanceLifecycleConfig()
|
3769
4042
|
self.instance_lifecycle_config = temp_model.from_map(m['instanceLifecycleConfig'])
|
@@ -3804,6 +4077,8 @@ class Function(TeaModel):
|
|
3804
4077
|
self.runtime = m.get('runtime')
|
3805
4078
|
if m.get('sessionAffinity') is not None:
|
3806
4079
|
self.session_affinity = m.get('sessionAffinity')
|
4080
|
+
if m.get('sessionAffinityConfig') is not None:
|
4081
|
+
self.session_affinity_config = m.get('sessionAffinityConfig')
|
3807
4082
|
if m.get('state') is not None:
|
3808
4083
|
self.state = m.get('state')
|
3809
4084
|
if m.get('stateReason') is not None:
|
@@ -3965,6 +4240,26 @@ class GetResourceTagsOutput(TeaModel):
|
|
3965
4240
|
return self
|
3966
4241
|
|
3967
4242
|
|
4243
|
+
class GetScalingConfigStatusOutput(TeaModel):
|
4244
|
+
def __init__(self):
|
4245
|
+
pass
|
4246
|
+
|
4247
|
+
def validate(self):
|
4248
|
+
pass
|
4249
|
+
|
4250
|
+
def to_map(self):
|
4251
|
+
_map = super().to_map()
|
4252
|
+
if _map is not None:
|
4253
|
+
return _map
|
4254
|
+
|
4255
|
+
result = dict()
|
4256
|
+
return result
|
4257
|
+
|
4258
|
+
def from_map(self, m: dict = None):
|
4259
|
+
m = m or dict()
|
4260
|
+
return self
|
4261
|
+
|
4262
|
+
|
3968
4263
|
class HTTPTrigger(TeaModel):
|
3969
4264
|
def __init__(
|
3970
4265
|
self,
|
@@ -4043,6 +4338,51 @@ class HTTPTriggerConfig(TeaModel):
|
|
4043
4338
|
return self
|
4044
4339
|
|
4045
4340
|
|
4341
|
+
class HeaderFieldSessionAffinityConfig(TeaModel):
|
4342
|
+
def __init__(
|
4343
|
+
self,
|
4344
|
+
affinity_header_field_name: str = None,
|
4345
|
+
session_concurrency_per_instance: int = None,
|
4346
|
+
session_idle_timeout_in_seconds: int = None,
|
4347
|
+
session_ttlin_seconds: int = None,
|
4348
|
+
):
|
4349
|
+
self.affinity_header_field_name = affinity_header_field_name
|
4350
|
+
self.session_concurrency_per_instance = session_concurrency_per_instance
|
4351
|
+
self.session_idle_timeout_in_seconds = session_idle_timeout_in_seconds
|
4352
|
+
self.session_ttlin_seconds = session_ttlin_seconds
|
4353
|
+
|
4354
|
+
def validate(self):
|
4355
|
+
pass
|
4356
|
+
|
4357
|
+
def to_map(self):
|
4358
|
+
_map = super().to_map()
|
4359
|
+
if _map is not None:
|
4360
|
+
return _map
|
4361
|
+
|
4362
|
+
result = dict()
|
4363
|
+
if self.affinity_header_field_name is not None:
|
4364
|
+
result['affinityHeaderFieldName'] = self.affinity_header_field_name
|
4365
|
+
if self.session_concurrency_per_instance is not None:
|
4366
|
+
result['sessionConcurrencyPerInstance'] = self.session_concurrency_per_instance
|
4367
|
+
if self.session_idle_timeout_in_seconds is not None:
|
4368
|
+
result['sessionIdleTimeoutInSeconds'] = self.session_idle_timeout_in_seconds
|
4369
|
+
if self.session_ttlin_seconds is not None:
|
4370
|
+
result['sessionTTLInSeconds'] = self.session_ttlin_seconds
|
4371
|
+
return result
|
4372
|
+
|
4373
|
+
def from_map(self, m: dict = None):
|
4374
|
+
m = m or dict()
|
4375
|
+
if m.get('affinityHeaderFieldName') is not None:
|
4376
|
+
self.affinity_header_field_name = m.get('affinityHeaderFieldName')
|
4377
|
+
if m.get('sessionConcurrencyPerInstance') is not None:
|
4378
|
+
self.session_concurrency_per_instance = m.get('sessionConcurrencyPerInstance')
|
4379
|
+
if m.get('sessionIdleTimeoutInSeconds') is not None:
|
4380
|
+
self.session_idle_timeout_in_seconds = m.get('sessionIdleTimeoutInSeconds')
|
4381
|
+
if m.get('sessionTTLInSeconds') is not None:
|
4382
|
+
self.session_ttlin_seconds = m.get('sessionTTLInSeconds')
|
4383
|
+
return self
|
4384
|
+
|
4385
|
+
|
4046
4386
|
class InstanceInfo(TeaModel):
|
4047
4387
|
def __init__(
|
4048
4388
|
self,
|
@@ -4461,6 +4801,47 @@ class ListCustomDomainOutput(TeaModel):
|
|
4461
4801
|
return self
|
4462
4802
|
|
4463
4803
|
|
4804
|
+
class ListElasticConfigsOutput(TeaModel):
|
4805
|
+
def __init__(
|
4806
|
+
self,
|
4807
|
+
elastic_configs: List[ElasticConfigStatus] = None,
|
4808
|
+
next_token: str = None,
|
4809
|
+
):
|
4810
|
+
self.elastic_configs = elastic_configs
|
4811
|
+
self.next_token = next_token
|
4812
|
+
|
4813
|
+
def validate(self):
|
4814
|
+
if self.elastic_configs:
|
4815
|
+
for k in self.elastic_configs:
|
4816
|
+
if k:
|
4817
|
+
k.validate()
|
4818
|
+
|
4819
|
+
def to_map(self):
|
4820
|
+
_map = super().to_map()
|
4821
|
+
if _map is not None:
|
4822
|
+
return _map
|
4823
|
+
|
4824
|
+
result = dict()
|
4825
|
+
result['elasticConfigs'] = []
|
4826
|
+
if self.elastic_configs is not None:
|
4827
|
+
for k in self.elastic_configs:
|
4828
|
+
result['elasticConfigs'].append(k.to_map() if k else None)
|
4829
|
+
if self.next_token is not None:
|
4830
|
+
result['nextToken'] = self.next_token
|
4831
|
+
return result
|
4832
|
+
|
4833
|
+
def from_map(self, m: dict = None):
|
4834
|
+
m = m or dict()
|
4835
|
+
self.elastic_configs = []
|
4836
|
+
if m.get('elasticConfigs') is not None:
|
4837
|
+
for k in m.get('elasticConfigs'):
|
4838
|
+
temp_model = ElasticConfigStatus()
|
4839
|
+
self.elastic_configs.append(temp_model.from_map(k))
|
4840
|
+
if m.get('nextToken') is not None:
|
4841
|
+
self.next_token = m.get('nextToken')
|
4842
|
+
return self
|
4843
|
+
|
4844
|
+
|
4464
4845
|
class ListFunctionsOutput(TeaModel):
|
4465
4846
|
def __init__(
|
4466
4847
|
self,
|
@@ -4892,18 +5273,31 @@ class ListProvisionConfigsOutput(TeaModel):
|
|
4892
5273
|
return self
|
4893
5274
|
|
4894
5275
|
|
4895
|
-
class
|
5276
|
+
class ResidentResourceAllocation(TeaModel):
|
4896
5277
|
def __init__(
|
4897
5278
|
self,
|
4898
|
-
|
4899
|
-
|
4900
|
-
|
4901
|
-
|
5279
|
+
function_name: str = None,
|
5280
|
+
instance_count: int = None,
|
5281
|
+
qualifier: str = None,
|
5282
|
+
total_cpu_cores: float = None,
|
5283
|
+
total_disk_size: float = None,
|
5284
|
+
total_gpu_memory_size: float = None,
|
5285
|
+
total_memory_size: float = None,
|
4902
5286
|
):
|
4903
|
-
|
4904
|
-
self.
|
4905
|
-
|
4906
|
-
self.
|
5287
|
+
# 使用该资源池的函数名
|
5288
|
+
self.function_name = function_name
|
5289
|
+
# 实例数
|
5290
|
+
self.instance_count = instance_count
|
5291
|
+
# 函数的别名
|
5292
|
+
self.qualifier = qualifier
|
5293
|
+
# CPU 占用总核数
|
5294
|
+
self.total_cpu_cores = total_cpu_cores
|
5295
|
+
# 占用磁盘大小,单位 GB
|
5296
|
+
self.total_disk_size = total_disk_size
|
5297
|
+
# 占用显存大小,单位 GB
|
5298
|
+
self.total_gpu_memory_size = total_gpu_memory_size
|
5299
|
+
# 内存占用大小,单位 GB
|
5300
|
+
self.total_memory_size = total_memory_size
|
4907
5301
|
|
4908
5302
|
def validate(self):
|
4909
5303
|
pass
|
@@ -4914,17 +5308,451 @@ class TagResource(TeaModel):
|
|
4914
5308
|
return _map
|
4915
5309
|
|
4916
5310
|
result = dict()
|
4917
|
-
if self.
|
4918
|
-
result['
|
4919
|
-
if self.
|
4920
|
-
result['
|
4921
|
-
if self.
|
4922
|
-
result['
|
4923
|
-
if self.
|
4924
|
-
result['
|
4925
|
-
|
4926
|
-
|
4927
|
-
|
5311
|
+
if self.function_name is not None:
|
5312
|
+
result['functionName'] = self.function_name
|
5313
|
+
if self.instance_count is not None:
|
5314
|
+
result['instanceCount'] = self.instance_count
|
5315
|
+
if self.qualifier is not None:
|
5316
|
+
result['qualifier'] = self.qualifier
|
5317
|
+
if self.total_cpu_cores is not None:
|
5318
|
+
result['totalCpuCores'] = self.total_cpu_cores
|
5319
|
+
if self.total_disk_size is not None:
|
5320
|
+
result['totalDiskSize'] = self.total_disk_size
|
5321
|
+
if self.total_gpu_memory_size is not None:
|
5322
|
+
result['totalGpuMemorySize'] = self.total_gpu_memory_size
|
5323
|
+
if self.total_memory_size is not None:
|
5324
|
+
result['totalMemorySize'] = self.total_memory_size
|
5325
|
+
return result
|
5326
|
+
|
5327
|
+
def from_map(self, m: dict = None):
|
5328
|
+
m = m or dict()
|
5329
|
+
if m.get('functionName') is not None:
|
5330
|
+
self.function_name = m.get('functionName')
|
5331
|
+
if m.get('instanceCount') is not None:
|
5332
|
+
self.instance_count = m.get('instanceCount')
|
5333
|
+
if m.get('qualifier') is not None:
|
5334
|
+
self.qualifier = m.get('qualifier')
|
5335
|
+
if m.get('totalCpuCores') is not None:
|
5336
|
+
self.total_cpu_cores = m.get('totalCpuCores')
|
5337
|
+
if m.get('totalDiskSize') is not None:
|
5338
|
+
self.total_disk_size = m.get('totalDiskSize')
|
5339
|
+
if m.get('totalGpuMemorySize') is not None:
|
5340
|
+
self.total_gpu_memory_size = m.get('totalGpuMemorySize')
|
5341
|
+
if m.get('totalMemorySize') is not None:
|
5342
|
+
self.total_memory_size = m.get('totalMemorySize')
|
5343
|
+
return self
|
5344
|
+
|
5345
|
+
|
5346
|
+
class ResidentResourceAllocationStatus(TeaModel):
|
5347
|
+
def __init__(
|
5348
|
+
self,
|
5349
|
+
last_allocated_time: str = None,
|
5350
|
+
last_allocation: List[ResidentResourceAllocation] = None,
|
5351
|
+
):
|
5352
|
+
self.last_allocated_time = last_allocated_time
|
5353
|
+
self.last_allocation = last_allocation
|
5354
|
+
|
5355
|
+
def validate(self):
|
5356
|
+
if self.last_allocation:
|
5357
|
+
for k in self.last_allocation:
|
5358
|
+
if k:
|
5359
|
+
k.validate()
|
5360
|
+
|
5361
|
+
def to_map(self):
|
5362
|
+
_map = super().to_map()
|
5363
|
+
if _map is not None:
|
5364
|
+
return _map
|
5365
|
+
|
5366
|
+
result = dict()
|
5367
|
+
if self.last_allocated_time is not None:
|
5368
|
+
result['lastAllocatedTime'] = self.last_allocated_time
|
5369
|
+
result['lastAllocation'] = []
|
5370
|
+
if self.last_allocation is not None:
|
5371
|
+
for k in self.last_allocation:
|
5372
|
+
result['lastAllocation'].append(k.to_map() if k else None)
|
5373
|
+
return result
|
5374
|
+
|
5375
|
+
def from_map(self, m: dict = None):
|
5376
|
+
m = m or dict()
|
5377
|
+
if m.get('lastAllocatedTime') is not None:
|
5378
|
+
self.last_allocated_time = m.get('lastAllocatedTime')
|
5379
|
+
self.last_allocation = []
|
5380
|
+
if m.get('lastAllocation') is not None:
|
5381
|
+
for k in m.get('lastAllocation'):
|
5382
|
+
temp_model = ResidentResourceAllocation()
|
5383
|
+
self.last_allocation.append(temp_model.from_map(k))
|
5384
|
+
return self
|
5385
|
+
|
5386
|
+
|
5387
|
+
class ResidentResourceCapacity(TeaModel):
|
5388
|
+
def __init__(
|
5389
|
+
self,
|
5390
|
+
gpu_type: str = None,
|
5391
|
+
total_cpu_cores: int = None,
|
5392
|
+
total_disk_size: int = None,
|
5393
|
+
total_gpu_cards: int = None,
|
5394
|
+
total_gpu_memory_size: int = None,
|
5395
|
+
total_memory_size: int = None,
|
5396
|
+
):
|
5397
|
+
# GPU 卡型
|
5398
|
+
self.gpu_type = gpu_type
|
5399
|
+
# CPU 总核数
|
5400
|
+
self.total_cpu_cores = total_cpu_cores
|
5401
|
+
# 总磁盘大小,单位 GB
|
5402
|
+
self.total_disk_size = total_disk_size
|
5403
|
+
# GPU总卡数
|
5404
|
+
self.total_gpu_cards = total_gpu_cards
|
5405
|
+
# 总显存大小,单位 GB
|
5406
|
+
self.total_gpu_memory_size = total_gpu_memory_size
|
5407
|
+
# 总内存大小,单位 GB
|
5408
|
+
self.total_memory_size = total_memory_size
|
5409
|
+
|
5410
|
+
def validate(self):
|
5411
|
+
pass
|
5412
|
+
|
5413
|
+
def to_map(self):
|
5414
|
+
_map = super().to_map()
|
5415
|
+
if _map is not None:
|
5416
|
+
return _map
|
5417
|
+
|
5418
|
+
result = dict()
|
5419
|
+
if self.gpu_type is not None:
|
5420
|
+
result['gpuType'] = self.gpu_type
|
5421
|
+
if self.total_cpu_cores is not None:
|
5422
|
+
result['totalCpuCores'] = self.total_cpu_cores
|
5423
|
+
if self.total_disk_size is not None:
|
5424
|
+
result['totalDiskSize'] = self.total_disk_size
|
5425
|
+
if self.total_gpu_cards is not None:
|
5426
|
+
result['totalGpuCards'] = self.total_gpu_cards
|
5427
|
+
if self.total_gpu_memory_size is not None:
|
5428
|
+
result['totalGpuMemorySize'] = self.total_gpu_memory_size
|
5429
|
+
if self.total_memory_size is not None:
|
5430
|
+
result['totalMemorySize'] = self.total_memory_size
|
5431
|
+
return result
|
5432
|
+
|
5433
|
+
def from_map(self, m: dict = None):
|
5434
|
+
m = m or dict()
|
5435
|
+
if m.get('gpuType') is not None:
|
5436
|
+
self.gpu_type = m.get('gpuType')
|
5437
|
+
if m.get('totalCpuCores') is not None:
|
5438
|
+
self.total_cpu_cores = m.get('totalCpuCores')
|
5439
|
+
if m.get('totalDiskSize') is not None:
|
5440
|
+
self.total_disk_size = m.get('totalDiskSize')
|
5441
|
+
if m.get('totalGpuCards') is not None:
|
5442
|
+
self.total_gpu_cards = m.get('totalGpuCards')
|
5443
|
+
if m.get('totalGpuMemorySize') is not None:
|
5444
|
+
self.total_gpu_memory_size = m.get('totalGpuMemorySize')
|
5445
|
+
if m.get('totalMemorySize') is not None:
|
5446
|
+
self.total_memory_size = m.get('totalMemorySize')
|
5447
|
+
return self
|
5448
|
+
|
5449
|
+
|
5450
|
+
class ResidentResourcePool(TeaModel):
    """A resident resource pool together with its live allocation state.

    Timestamps use the UTC format ``yyyy-MM-ddTHH:mmZ``.
    """

    def __init__(
        self,
        allocation_status: ResidentResourceAllocationStatus = None,
        created_time: str = None,
        expire_time: str = None,
        last_modified_time: str = None,
        resident_resource_pool_id: str = None,
        resident_resource_pool_name: str = None,
        resource_pool_capacity: ResidentResourceCapacity = None,
        resource_pool_config: ResidentResourceCapacity = None,
    ):
        # Real-time allocation of the pool, including per-function details.
        self.allocation_status = allocation_status
        # Creation time of the pool (UTC: yyyy-MM-ddTHH:mmZ).
        self.created_time = created_time
        # Expiration time of the pool.
        self.expire_time = expire_time
        # Last modification time (covers scale-out, renewal, rename, ...).
        self.last_modified_time = last_modified_time
        self.resident_resource_pool_id = resident_resource_pool_id
        # Display name of the resource pool.
        self.resident_resource_pool_name = resident_resource_pool_name
        # Overall capacity of the resource pool.
        self.resource_pool_capacity = resource_pool_capacity
        self.resource_pool_config = resource_pool_config

    def validate(self):
        # Cascade validation into every nested model that is set.
        for nested in (
            self.allocation_status,
            self.resource_pool_capacity,
            self.resource_pool_config,
        ):
            if nested:
                nested.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        if self.allocation_status is not None:
            result['allocationStatus'] = self.allocation_status.to_map()
        for key, value in (
            ('createdTime', self.created_time),
            ('expireTime', self.expire_time),
            ('lastModifiedTime', self.last_modified_time),
            ('residentResourcePoolId', self.resident_resource_pool_id),
            ('residentResourcePoolName', self.resident_resource_pool_name),
        ):
            if value is not None:
                result[key] = value
        if self.resource_pool_capacity is not None:
            result['resourcePoolCapacity'] = self.resource_pool_capacity.to_map()
        if self.resource_pool_config is not None:
            result['resourcePoolConfig'] = self.resource_pool_config.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('allocationStatus') is not None:
            self.allocation_status = ResidentResourceAllocationStatus().from_map(m['allocationStatus'])
        for key, attr in (
            ('createdTime', 'created_time'),
            ('expireTime', 'expire_time'),
            ('lastModifiedTime', 'last_modified_time'),
            ('residentResourcePoolId', 'resident_resource_pool_id'),
            ('residentResourcePoolName', 'resident_resource_pool_name'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('resourcePoolCapacity') is not None:
            self.resource_pool_capacity = ResidentResourceCapacity().from_map(m['resourcePoolCapacity'])
        if m.get('resourcePoolConfig') is not None:
            self.resource_pool_config = ResidentResourceCapacity().from_map(m['resourcePoolConfig'])
        return self
|
5533
|
+
|
5534
|
+
|
5535
|
+
class ListResidentResourcePoolsOutput(TeaModel):
    """Paginated listing of resident resource pools."""

    def __init__(
        self,
        next_token: str = None,
        resident_resource_pools: List[ResidentResourcePool] = None,
    ):
        # Token for fetching the next page; absent on the last page.
        self.next_token = next_token
        self.resident_resource_pools = resident_resource_pools

    def validate(self):
        for pool in self.resident_resource_pools or ():
            if pool:
                pool.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        if self.next_token is not None:
            result['nextToken'] = self.next_token
        pools = self.resident_resource_pools
        result['residentResourcePools'] = (
            [] if pools is None else [p.to_map() if p else None for p in pools]
        )
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('nextToken') is not None:
            self.next_token = m.get('nextToken')
        raw = m.get('residentResourcePools')
        self.resident_resource_pools = [] if raw is None else [
            ResidentResourcePool().from_map(item) for item in raw
        ]
        return self
|
5574
|
+
|
5575
|
+
|
5576
|
+
class ListScalingConfigStatusOutput(TeaModel):
    """Empty response body for the ListScalingConfigStatus API."""

    def __init__(self):
        pass

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        # No fields of our own: either pass the parent map through or
        # hand back an empty dict.
        return mapped if mapped is not None else dict()

    def from_map(self, m: dict = None):
        m = m or dict()
        return self
|
5594
|
+
|
5595
|
+
|
5596
|
+
class ScalingConfigStatus(TeaModel):
    """Current scaling state of one function, with its configured policies."""

    def __init__(
        self,
        current_error: str = None,
        current_instances: int = None,
        function_arn: str = None,
        horizontal_scaling_policies: List[ScalingPolicy] = None,
        min_instances: int = None,
        resident_pool_id: str = None,
        scheduled_policies: List[ScheduledPolicy] = None,
        target_instances: int = None,
    ):
        self.current_error = current_error
        self.current_instances = current_instances
        self.function_arn = function_arn
        self.horizontal_scaling_policies = horizontal_scaling_policies
        self.min_instances = min_instances
        self.resident_pool_id = resident_pool_id
        self.scheduled_policies = scheduled_policies
        self.target_instances = target_instances

    def validate(self):
        for policy in self.horizontal_scaling_policies or ():
            if policy:
                policy.validate()
        for policy in self.scheduled_policies or ():
            if policy:
                policy.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        for key, value in (
            ('currentError', self.current_error),
            ('currentInstances', self.current_instances),
            ('functionArn', self.function_arn),
        ):
            if value is not None:
                result[key] = value
        horizontal = self.horizontal_scaling_policies
        result['horizontalScalingPolicies'] = (
            [] if horizontal is None else [p.to_map() if p else None for p in horizontal]
        )
        for key, value in (
            ('minInstances', self.min_instances),
            ('residentPoolId', self.resident_pool_id),
        ):
            if value is not None:
                result[key] = value
        scheduled = self.scheduled_policies
        result['scheduledPolicies'] = (
            [] if scheduled is None else [p.to_map() if p else None for p in scheduled]
        )
        if self.target_instances is not None:
            result['targetInstances'] = self.target_instances
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('currentError', 'current_error'),
            ('currentInstances', 'current_instances'),
            ('functionArn', 'function_arn'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        raw = m.get('horizontalScalingPolicies')
        self.horizontal_scaling_policies = [] if raw is None else [
            ScalingPolicy().from_map(item) for item in raw
        ]
        for key, attr in (
            ('minInstances', 'min_instances'),
            ('residentPoolId', 'resident_pool_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        raw = m.get('scheduledPolicies')
        self.scheduled_policies = [] if raw is None else [
            ScheduledPolicy().from_map(item) for item in raw
        ]
        if m.get('targetInstances') is not None:
            self.target_instances = m.get('targetInstances')
        return self
|
5680
|
+
|
5681
|
+
|
5682
|
+
class ListScalingConfigsOutput(TeaModel):
    """Paginated listing of scaling configurations."""

    def __init__(
        self,
        next_token: str = None,
        scaling_configs: List[ScalingConfigStatus] = None,
    ):
        # Token for fetching the next page; absent on the last page.
        self.next_token = next_token
        self.scaling_configs = scaling_configs

    def validate(self):
        for config in self.scaling_configs or ():
            if config:
                config.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        if self.next_token is not None:
            result['nextToken'] = self.next_token
        configs = self.scaling_configs
        result['scalingConfigs'] = (
            [] if configs is None else [c.to_map() if c else None for c in configs]
        )
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('nextToken') is not None:
            self.next_token = m.get('nextToken')
        raw = m.get('scalingConfigs')
        self.scaling_configs = [] if raw is None else [
            ScalingConfigStatus().from_map(item) for item in raw
        ]
        return self
|
5721
|
+
|
5722
|
+
|
5723
|
+
class TagResource(TeaModel):
|
5724
|
+
def __init__(
|
5725
|
+
self,
|
5726
|
+
resource_id: str = None,
|
5727
|
+
resource_type: str = None,
|
5728
|
+
tag_key: str = None,
|
5729
|
+
tag_value: str = None,
|
5730
|
+
):
|
5731
|
+
self.resource_id = resource_id
|
5732
|
+
self.resource_type = resource_type
|
5733
|
+
self.tag_key = tag_key
|
5734
|
+
self.tag_value = tag_value
|
5735
|
+
|
5736
|
+
def validate(self):
|
5737
|
+
pass
|
5738
|
+
|
5739
|
+
def to_map(self):
|
5740
|
+
_map = super().to_map()
|
5741
|
+
if _map is not None:
|
5742
|
+
return _map
|
5743
|
+
|
5744
|
+
result = dict()
|
5745
|
+
if self.resource_id is not None:
|
5746
|
+
result['ResourceId'] = self.resource_id
|
5747
|
+
if self.resource_type is not None:
|
5748
|
+
result['ResourceType'] = self.resource_type
|
5749
|
+
if self.tag_key is not None:
|
5750
|
+
result['TagKey'] = self.tag_key
|
5751
|
+
if self.tag_value is not None:
|
5752
|
+
result['TagValue'] = self.tag_value
|
5753
|
+
return result
|
5754
|
+
|
5755
|
+
def from_map(self, m: dict = None):
|
4928
5756
|
m = m or dict()
|
4929
5757
|
if m.get('ResourceId') is not None:
|
4930
5758
|
self.resource_id = m.get('ResourceId')
|
@@ -5325,6 +6153,78 @@ class ListVpcBindingsOutput(TeaModel):
|
|
5325
6153
|
return self
|
5326
6154
|
|
5327
6155
|
|
6156
|
+
class MCPSSESessionAffinityConfig(TeaModel):
    """Session-affinity settings for MCP over Server-Sent Events."""

    def __init__(
        self,
        session_concurrency_per_instance: int = None,
        sse_endpoint_path: str = None,
    ):
        self.session_concurrency_per_instance = session_concurrency_per_instance
        self.sse_endpoint_path = sse_endpoint_path

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        for key, value in (
            ('sessionConcurrencyPerInstance', self.session_concurrency_per_instance),
            ('sseEndpointPath', self.sse_endpoint_path),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('sessionConcurrencyPerInstance', 'session_concurrency_per_instance'),
            ('sseEndpointPath', 'sse_endpoint_path'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
|
6187
|
+
|
6188
|
+
|
6189
|
+
class MCPStreamableSessionAffinityConfig(TeaModel):
    """Session-affinity settings for MCP streamable-HTTP transport."""

    def __init__(
        self,
        session_concurrency_per_instance: int = None,
        session_idle_timeout_in_seconds: int = None,
        session_ttlin_seconds: int = None,
    ):
        self.session_concurrency_per_instance = session_concurrency_per_instance
        self.session_idle_timeout_in_seconds = session_idle_timeout_in_seconds
        # Wire name is 'sessionTTLInSeconds'; the generator collapsed
        # 'TTLIn' to 'ttlin' in the attribute name.
        self.session_ttlin_seconds = session_ttlin_seconds

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        for key, value in (
            ('sessionConcurrencyPerInstance', self.session_concurrency_per_instance),
            ('sessionIdleTimeoutInSeconds', self.session_idle_timeout_in_seconds),
            ('sessionTTLInSeconds', self.session_ttlin_seconds),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('sessionConcurrencyPerInstance', 'session_concurrency_per_instance'),
            ('sessionIdleTimeoutInSeconds', 'session_idle_timeout_in_seconds'),
            ('sessionTTLInSeconds', 'session_ttlin_seconds'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
|
6226
|
+
|
6227
|
+
|
5328
6228
|
class MNSTopicTriggerConfig(TeaModel):
|
5329
6229
|
def __init__(
|
5330
6230
|
self,
|
@@ -5506,16 +6406,56 @@ class PutAsyncInvokeConfigInput(TeaModel):
|
|
5506
6406
|
return self
|
5507
6407
|
|
5508
6408
|
|
5509
|
-
class PutConcurrencyInput(TeaModel):
|
6409
|
+
class PutConcurrencyInput(TeaModel):
    """Request body for setting a function's reserved concurrency."""

    def __init__(
        self,
        reserved_concurrency: int = None,
    ):
        # This parameter is required.
        self.reserved_concurrency = reserved_concurrency

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        if self.reserved_concurrency is not None:
            result['reservedConcurrency'] = self.reserved_concurrency
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('reservedConcurrency') is not None:
            self.reserved_concurrency = m.get('reservedConcurrency')
        return self
|
6435
|
+
|
6436
|
+
|
6437
|
+
class PutElasticConfigInput(TeaModel):
    """Request body for configuring a function's elasticity settings."""

    def __init__(
        self,
        min_instances: int = None,
        resident_pool_id: str = None,
        scaling_policies: List[ScalingPolicy] = None,
        scheduled_policies: List[ScheduledPolicy] = None,
    ):
        self.min_instances = min_instances
        self.resident_pool_id = resident_pool_id
        self.scaling_policies = scaling_policies
        self.scheduled_policies = scheduled_policies

    def validate(self):
        for policy in self.scaling_policies or ():
            if policy:
                policy.validate()
        for policy in self.scheduled_policies or ():
            if policy:
                policy.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        if self.min_instances is not None:
            result['minInstances'] = self.min_instances
        if self.resident_pool_id is not None:
            result['residentPoolId'] = self.resident_pool_id
        scaling = self.scaling_policies
        result['scalingPolicies'] = (
            [] if scaling is None else [p.to_map() if p else None for p in scaling]
        )
        scheduled = self.scheduled_policies
        result['scheduledPolicies'] = (
            [] if scheduled is None else [p.to_map() if p else None for p in scheduled]
        )
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('minInstances') is not None:
            self.min_instances = m.get('minInstances')
        if m.get('residentPoolId') is not None:
            self.resident_pool_id = m.get('residentPoolId')
        raw = m.get('scalingPolicies')
        self.scaling_policies = [] if raw is None else [
            ScalingPolicy().from_map(item) for item in raw
        ]
        raw = m.get('scheduledPolicies')
        self.scheduled_policies = [] if raw is None else [
            ScheduledPolicy().from_map(item) for item in raw
        ]
        return self
|
5535
6497
|
|
5536
6498
|
|
@@ -5609,6 +6571,121 @@ class PutProvisionConfigInput(TeaModel):
|
|
5609
6571
|
return self
|
5610
6572
|
|
5611
6573
|
|
6574
|
+
class PutScalingConfigInput(TeaModel):
    """Request body for writing a function's scaling configuration."""

    def __init__(
        self,
        horizontal_scaling_policies: List[ScalingPolicy] = None,
        min_instances: int = None,
        resident_pool_id: str = None,
        scheduled_policies: List[ScheduledPolicy] = None,
    ):
        self.horizontal_scaling_policies = horizontal_scaling_policies
        self.min_instances = min_instances
        self.resident_pool_id = resident_pool_id
        self.scheduled_policies = scheduled_policies

    def validate(self):
        for policy in self.horizontal_scaling_policies or ():
            if policy:
                policy.validate()
        for policy in self.scheduled_policies or ():
            if policy:
                policy.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        horizontal = self.horizontal_scaling_policies
        result['horizontalScalingPolicies'] = (
            [] if horizontal is None else [p.to_map() if p else None for p in horizontal]
        )
        if self.min_instances is not None:
            result['minInstances'] = self.min_instances
        if self.resident_pool_id is not None:
            result['residentPoolId'] = self.resident_pool_id
        scheduled = self.scheduled_policies
        result['scheduledPolicies'] = (
            [] if scheduled is None else [p.to_map() if p else None for p in scheduled]
        )
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        raw = m.get('horizontalScalingPolicies')
        self.horizontal_scaling_policies = [] if raw is None else [
            ScalingPolicy().from_map(item) for item in raw
        ]
        if m.get('minInstances') is not None:
            self.min_instances = m.get('minInstances')
        if m.get('residentPoolId') is not None:
            self.resident_pool_id = m.get('residentPoolId')
        raw = m.get('scheduledPolicies')
        self.scheduled_policies = [] if raw is None else [
            ScheduledPolicy().from_map(item) for item in raw
        ]
        return self
|
6634
|
+
|
6635
|
+
|
6636
|
+
class PutScalingConfigOutput(TeaModel):
    """Empty response body for the PutScalingConfig API."""

    def __init__(self):
        pass

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        # No fields of our own: either pass the parent map through or
        # hand back an empty dict.
        return mapped if mapped is not None else dict()

    def from_map(self, m: dict = None):
        m = m or dict()
        return self
|
6654
|
+
|
6655
|
+
|
6656
|
+
class ResidentConfig(TeaModel):
    """Resident-instance settings: how many instances from which pool."""

    def __init__(
        self,
        count: int = None,
        pool_id: str = None,
    ):
        self.count = count
        self.pool_id = pool_id

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        for key, value in (
            ('count', self.count),
            ('poolId', self.pool_id),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('count', 'count'),
            ('poolId', 'pool_id'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
|
6687
|
+
|
6688
|
+
|
5612
6689
|
class SLSTriggerLogConfig(TeaModel):
|
5613
6690
|
def __init__(
|
5614
6691
|
self,
|
@@ -5734,6 +6811,39 @@ class SLSTriggerConfig(TeaModel):
|
|
5734
6811
|
return self
|
5735
6812
|
|
5736
6813
|
|
6814
|
+
class ScalingStatus(TeaModel):
    """Snapshot of a scaling operation: resource count plus last error."""

    def __init__(
        self,
        current_error: str = None,
        resource_count: int = None,
    ):
        self.current_error = current_error
        self.resource_count = resource_count

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        for key, value in (
            ('currentError', self.current_error),
            ('resourceCount', self.resource_count),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('currentError', 'current_error'),
            ('resourceCount', 'resource_count'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
|
6845
|
+
|
6846
|
+
|
5737
6847
|
class TagResourceInput(TeaModel):
|
5738
6848
|
def __init__(
|
5739
6849
|
self,
|
@@ -5983,6 +7093,7 @@ class UpdateFunctionInput(TeaModel):
|
|
5983
7093
|
gpu_config: GPUConfig = None,
|
5984
7094
|
handler: str = None,
|
5985
7095
|
instance_concurrency: int = None,
|
7096
|
+
instance_isolation_mode: str = None,
|
5986
7097
|
instance_lifecycle_config: InstanceLifecycleConfig = None,
|
5987
7098
|
internet_access: bool = None,
|
5988
7099
|
layers: List[str] = None,
|
@@ -5993,6 +7104,7 @@ class UpdateFunctionInput(TeaModel):
|
|
5993
7104
|
role: str = None,
|
5994
7105
|
runtime: str = None,
|
5995
7106
|
session_affinity: str = None,
|
7107
|
+
session_affinity_config: str = None,
|
5996
7108
|
timeout: int = None,
|
5997
7109
|
tracing_config: TracingConfig = None,
|
5998
7110
|
vpc_config: VPCConfig = None,
|
@@ -6010,6 +7122,7 @@ class UpdateFunctionInput(TeaModel):
|
|
6010
7122
|
self.gpu_config = gpu_config
|
6011
7123
|
self.handler = handler
|
6012
7124
|
self.instance_concurrency = instance_concurrency
|
7125
|
+
self.instance_isolation_mode = instance_isolation_mode
|
6013
7126
|
self.instance_lifecycle_config = instance_lifecycle_config
|
6014
7127
|
self.internet_access = internet_access
|
6015
7128
|
self.layers = layers
|
@@ -6020,6 +7133,7 @@ class UpdateFunctionInput(TeaModel):
|
|
6020
7133
|
self.role = role
|
6021
7134
|
self.runtime = runtime
|
6022
7135
|
self.session_affinity = session_affinity
|
7136
|
+
self.session_affinity_config = session_affinity_config
|
6023
7137
|
self.timeout = timeout
|
6024
7138
|
self.tracing_config = tracing_config
|
6025
7139
|
self.vpc_config = vpc_config
|
@@ -6080,6 +7194,8 @@ class UpdateFunctionInput(TeaModel):
|
|
6080
7194
|
result['handler'] = self.handler
|
6081
7195
|
if self.instance_concurrency is not None:
|
6082
7196
|
result['instanceConcurrency'] = self.instance_concurrency
|
7197
|
+
if self.instance_isolation_mode is not None:
|
7198
|
+
result['instanceIsolationMode'] = self.instance_isolation_mode
|
6083
7199
|
if self.instance_lifecycle_config is not None:
|
6084
7200
|
result['instanceLifecycleConfig'] = self.instance_lifecycle_config.to_map()
|
6085
7201
|
if self.internet_access is not None:
|
@@ -6100,6 +7216,8 @@ class UpdateFunctionInput(TeaModel):
|
|
6100
7216
|
result['runtime'] = self.runtime
|
6101
7217
|
if self.session_affinity is not None:
|
6102
7218
|
result['sessionAffinity'] = self.session_affinity
|
7219
|
+
if self.session_affinity_config is not None:
|
7220
|
+
result['sessionAffinityConfig'] = self.session_affinity_config
|
6103
7221
|
if self.timeout is not None:
|
6104
7222
|
result['timeout'] = self.timeout
|
6105
7223
|
if self.tracing_config is not None:
|
@@ -6141,6 +7259,8 @@ class UpdateFunctionInput(TeaModel):
|
|
6141
7259
|
self.handler = m.get('handler')
|
6142
7260
|
if m.get('instanceConcurrency') is not None:
|
6143
7261
|
self.instance_concurrency = m.get('instanceConcurrency')
|
7262
|
+
if m.get('instanceIsolationMode') is not None:
|
7263
|
+
self.instance_isolation_mode = m.get('instanceIsolationMode')
|
6144
7264
|
if m.get('instanceLifecycleConfig') is not None:
|
6145
7265
|
temp_model = InstanceLifecycleConfig()
|
6146
7266
|
self.instance_lifecycle_config = temp_model.from_map(m['instanceLifecycleConfig'])
|
@@ -6165,6 +7285,8 @@ class UpdateFunctionInput(TeaModel):
|
|
6165
7285
|
self.runtime = m.get('runtime')
|
6166
7286
|
if m.get('sessionAffinity') is not None:
|
6167
7287
|
self.session_affinity = m.get('sessionAffinity')
|
7288
|
+
if m.get('sessionAffinityConfig') is not None:
|
7289
|
+
self.session_affinity_config = m.get('sessionAffinityConfig')
|
6168
7290
|
if m.get('timeout') is not None:
|
6169
7291
|
self.timeout = m.get('timeout')
|
6170
7292
|
if m.get('tracingConfig') is not None:
|
@@ -6176,6 +7298,39 @@ class UpdateFunctionInput(TeaModel):
|
|
6176
7298
|
return self
|
6177
7299
|
|
6178
7300
|
|
7301
|
+
class UpdateResidentResourcePoolInput(TeaModel):
    """Request body for updating a resident resource pool."""

    def __init__(
        self,
        name: str = None,
        use_scaling: bool = None,
    ):
        self.name = name
        self.use_scaling = use_scaling

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        for key, value in (
            ('name', self.name),
            ('useScaling', self.use_scaling),
        ):
            if value is not None:
                result[key] = value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (
            ('name', 'name'),
            ('useScaling', 'use_scaling'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
|
7332
|
+
|
7333
|
+
|
6179
7334
|
class UpdateTriggerInput(TeaModel):
|
6180
7335
|
def __init__(
|
6181
7336
|
self,
|
@@ -6221,6 +7376,76 @@ class UpdateTriggerInput(TeaModel):
|
|
6221
7376
|
return self
|
6222
7377
|
|
6223
7378
|
|
7379
|
+
class ChangeResourceGroupRequest(TeaModel):
    """HTTP request wrapper for the ChangeResourceGroup API."""

    def __init__(
        self,
        body: ChangeResourceGroupInput = None,
    ):
        self.body = body

    def validate(self):
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('body') is not None:
            self.body = ChangeResourceGroupInput().from_map(m['body'])
        return self
|
7406
|
+
|
7407
|
+
|
7408
|
+
class ChangeResourceGroupResponse(TeaModel):
    """HTTP response wrapper for the ChangeResourceGroup API."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
        body: ChangeResourceGroupOutput = None,
    ):
        self.headers = headers
        self.status_code = status_code
        self.body = body

    def validate(self):
        if self.body:
            self.body.validate()

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        if m.get('body') is not None:
            self.body = ChangeResourceGroupOutput().from_map(m['body'])
        return self
|
7447
|
+
|
7448
|
+
|
6224
7449
|
class CreateAliasRequest(TeaModel):
|
6225
7450
|
def __init__(
|
6226
7451
|
self,
|
@@ -6701,18 +7926,84 @@ class DeleteAsyncInvokeConfigRequest(TeaModel):
|
|
6701
7926
|
return _map
|
6702
7927
|
|
6703
7928
|
result = dict()
|
6704
|
-
if self.qualifier is not None:
|
6705
|
-
result['qualifier'] = self.qualifier
|
7929
|
+
if self.qualifier is not None:
|
7930
|
+
result['qualifier'] = self.qualifier
|
7931
|
+
return result
|
7932
|
+
|
7933
|
+
def from_map(self, m: dict = None):
|
7934
|
+
m = m or dict()
|
7935
|
+
if m.get('qualifier') is not None:
|
7936
|
+
self.qualifier = m.get('qualifier')
|
7937
|
+
return self
|
7938
|
+
|
7939
|
+
|
7940
|
+
class DeleteAsyncInvokeConfigResponse(TeaModel):
    """HTTP response wrapper (headers + status only) for DeleteAsyncInvokeConfig."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        status_code: int = None,
    ):
        self.headers = headers
        self.status_code = status_code

    def validate(self):
        pass

    def to_map(self):
        mapped = super().to_map()
        if mapped is not None:
            return mapped

        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.status_code is not None:
            result['statusCode'] = self.status_code
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('statusCode') is not None:
            self.status_code = m.get('statusCode')
        return self
|
7971
|
+
|
7972
|
+
|
7973
|
+
class DeleteConcurrencyConfigResponse(TeaModel):
|
7974
|
+
def __init__(
|
7975
|
+
self,
|
7976
|
+
headers: Dict[str, str] = None,
|
7977
|
+
status_code: int = None,
|
7978
|
+
):
|
7979
|
+
self.headers = headers
|
7980
|
+
self.status_code = status_code
|
7981
|
+
|
7982
|
+
def validate(self):
|
7983
|
+
pass
|
7984
|
+
|
7985
|
+
def to_map(self):
|
7986
|
+
_map = super().to_map()
|
7987
|
+
if _map is not None:
|
7988
|
+
return _map
|
7989
|
+
|
7990
|
+
result = dict()
|
7991
|
+
if self.headers is not None:
|
7992
|
+
result['headers'] = self.headers
|
7993
|
+
if self.status_code is not None:
|
7994
|
+
result['statusCode'] = self.status_code
|
6706
7995
|
return result
|
6707
7996
|
|
6708
7997
|
def from_map(self, m: dict = None):
|
6709
7998
|
m = m or dict()
|
6710
|
-
if m.get('
|
6711
|
-
self.
|
7999
|
+
if m.get('headers') is not None:
|
8000
|
+
self.headers = m.get('headers')
|
8001
|
+
if m.get('statusCode') is not None:
|
8002
|
+
self.status_code = m.get('statusCode')
|
6712
8003
|
return self
|
6713
8004
|
|
6714
8005
|
|
6715
|
-
class
|
8006
|
+
class DeleteCustomDomainResponse(TeaModel):
|
6716
8007
|
def __init__(
|
6717
8008
|
self,
|
6718
8009
|
headers: Dict[str, str] = None,
|
@@ -6745,7 +8036,7 @@ class DeleteAsyncInvokeConfigResponse(TeaModel):
|
|
6745
8036
|
return self
|
6746
8037
|
|
6747
8038
|
|
6748
|
-
class
|
8039
|
+
class DeleteFunctionResponse(TeaModel):
|
6749
8040
|
def __init__(
|
6750
8041
|
self,
|
6751
8042
|
headers: Dict[str, str] = None,
|
@@ -6778,7 +8069,7 @@ class DeleteConcurrencyConfigResponse(TeaModel):
|
|
6778
8069
|
return self
|
6779
8070
|
|
6780
8071
|
|
6781
|
-
class
|
8072
|
+
class DeleteFunctionVersionResponse(TeaModel):
|
6782
8073
|
def __init__(
|
6783
8074
|
self,
|
6784
8075
|
headers: Dict[str, str] = None,
|
@@ -6811,7 +8102,7 @@ class DeleteCustomDomainResponse(TeaModel):
|
|
6811
8102
|
return self
|
6812
8103
|
|
6813
8104
|
|
6814
|
-
class
|
8105
|
+
class DeleteLayerVersionResponse(TeaModel):
|
6815
8106
|
def __init__(
|
6816
8107
|
self,
|
6817
8108
|
headers: Dict[str, str] = None,
|
@@ -6844,14 +8135,13 @@ class DeleteFunctionResponse(TeaModel):
|
|
6844
8135
|
return self
|
6845
8136
|
|
6846
8137
|
|
6847
|
-
class
|
8138
|
+
class DeleteProvisionConfigRequest(TeaModel):
|
6848
8139
|
def __init__(
|
6849
8140
|
self,
|
6850
|
-
|
6851
|
-
status_code: int = None,
|
8141
|
+
qualifier: str = None,
|
6852
8142
|
):
|
6853
|
-
|
6854
|
-
self.
|
8143
|
+
# The function alias.
|
8144
|
+
self.qualifier = qualifier
|
6855
8145
|
|
6856
8146
|
def validate(self):
|
6857
8147
|
pass
|
@@ -6862,22 +8152,18 @@ class DeleteFunctionVersionResponse(TeaModel):
|
|
6862
8152
|
return _map
|
6863
8153
|
|
6864
8154
|
result = dict()
|
6865
|
-
if self.
|
6866
|
-
result['
|
6867
|
-
if self.status_code is not None:
|
6868
|
-
result['statusCode'] = self.status_code
|
8155
|
+
if self.qualifier is not None:
|
8156
|
+
result['qualifier'] = self.qualifier
|
6869
8157
|
return result
|
6870
8158
|
|
6871
8159
|
def from_map(self, m: dict = None):
|
6872
8160
|
m = m or dict()
|
6873
|
-
if m.get('
|
6874
|
-
self.
|
6875
|
-
if m.get('statusCode') is not None:
|
6876
|
-
self.status_code = m.get('statusCode')
|
8161
|
+
if m.get('qualifier') is not None:
|
8162
|
+
self.qualifier = m.get('qualifier')
|
6877
8163
|
return self
|
6878
8164
|
|
6879
8165
|
|
6880
|
-
class
|
8166
|
+
class DeleteProvisionConfigResponse(TeaModel):
|
6881
8167
|
def __init__(
|
6882
8168
|
self,
|
6883
8169
|
headers: Dict[str, str] = None,
|
@@ -6910,12 +8196,11 @@ class DeleteLayerVersionResponse(TeaModel):
|
|
6910
8196
|
return self
|
6911
8197
|
|
6912
8198
|
|
6913
|
-
class
|
8199
|
+
class DeleteScalingConfigRequest(TeaModel):
|
6914
8200
|
def __init__(
|
6915
8201
|
self,
|
6916
8202
|
qualifier: str = None,
|
6917
8203
|
):
|
6918
|
-
# The function alias.
|
6919
8204
|
self.qualifier = qualifier
|
6920
8205
|
|
6921
8206
|
def validate(self):
|
@@ -6938,7 +8223,7 @@ class DeleteProvisionConfigRequest(TeaModel):
|
|
6938
8223
|
return self
|
6939
8224
|
|
6940
8225
|
|
6941
|
-
class
|
8226
|
+
class DeleteScalingConfigResponse(TeaModel):
|
6942
8227
|
def __init__(
|
6943
8228
|
self,
|
6944
8229
|
headers: Dict[str, str] = None,
|
@@ -7827,6 +9112,74 @@ class GetProvisionConfigResponse(TeaModel):
|
|
7827
9112
|
return self
|
7828
9113
|
|
7829
9114
|
|
9115
|
+
class GetScalingConfigRequest(TeaModel):
|
9116
|
+
def __init__(
|
9117
|
+
self,
|
9118
|
+
qualifier: str = None,
|
9119
|
+
):
|
9120
|
+
self.qualifier = qualifier
|
9121
|
+
|
9122
|
+
def validate(self):
|
9123
|
+
pass
|
9124
|
+
|
9125
|
+
def to_map(self):
|
9126
|
+
_map = super().to_map()
|
9127
|
+
if _map is not None:
|
9128
|
+
return _map
|
9129
|
+
|
9130
|
+
result = dict()
|
9131
|
+
if self.qualifier is not None:
|
9132
|
+
result['qualifier'] = self.qualifier
|
9133
|
+
return result
|
9134
|
+
|
9135
|
+
def from_map(self, m: dict = None):
|
9136
|
+
m = m or dict()
|
9137
|
+
if m.get('qualifier') is not None:
|
9138
|
+
self.qualifier = m.get('qualifier')
|
9139
|
+
return self
|
9140
|
+
|
9141
|
+
|
9142
|
+
class GetScalingConfigResponse(TeaModel):
|
9143
|
+
def __init__(
|
9144
|
+
self,
|
9145
|
+
headers: Dict[str, str] = None,
|
9146
|
+
status_code: int = None,
|
9147
|
+
body: ScalingConfigStatus = None,
|
9148
|
+
):
|
9149
|
+
self.headers = headers
|
9150
|
+
self.status_code = status_code
|
9151
|
+
self.body = body
|
9152
|
+
|
9153
|
+
def validate(self):
|
9154
|
+
if self.body:
|
9155
|
+
self.body.validate()
|
9156
|
+
|
9157
|
+
def to_map(self):
|
9158
|
+
_map = super().to_map()
|
9159
|
+
if _map is not None:
|
9160
|
+
return _map
|
9161
|
+
|
9162
|
+
result = dict()
|
9163
|
+
if self.headers is not None:
|
9164
|
+
result['headers'] = self.headers
|
9165
|
+
if self.status_code is not None:
|
9166
|
+
result['statusCode'] = self.status_code
|
9167
|
+
if self.body is not None:
|
9168
|
+
result['body'] = self.body.to_map()
|
9169
|
+
return result
|
9170
|
+
|
9171
|
+
def from_map(self, m: dict = None):
|
9172
|
+
m = m or dict()
|
9173
|
+
if m.get('headers') is not None:
|
9174
|
+
self.headers = m.get('headers')
|
9175
|
+
if m.get('statusCode') is not None:
|
9176
|
+
self.status_code = m.get('statusCode')
|
9177
|
+
if m.get('body') is not None:
|
9178
|
+
temp_model = ScalingConfigStatus()
|
9179
|
+
self.body = temp_model.from_map(m['body'])
|
9180
|
+
return self
|
9181
|
+
|
9182
|
+
|
7830
9183
|
class GetTriggerResponse(TeaModel):
|
7831
9184
|
def __init__(
|
7832
9185
|
self,
|
@@ -9227,6 +10580,86 @@ class ListProvisionConfigsResponse(TeaModel):
|
|
9227
10580
|
return self
|
9228
10581
|
|
9229
10582
|
|
10583
|
+
class ListScalingConfigsRequest(TeaModel):
|
10584
|
+
def __init__(
|
10585
|
+
self,
|
10586
|
+
function_name: str = None,
|
10587
|
+
limit: int = None,
|
10588
|
+
next_token: str = None,
|
10589
|
+
):
|
10590
|
+
self.function_name = function_name
|
10591
|
+
self.limit = limit
|
10592
|
+
self.next_token = next_token
|
10593
|
+
|
10594
|
+
def validate(self):
|
10595
|
+
pass
|
10596
|
+
|
10597
|
+
def to_map(self):
|
10598
|
+
_map = super().to_map()
|
10599
|
+
if _map is not None:
|
10600
|
+
return _map
|
10601
|
+
|
10602
|
+
result = dict()
|
10603
|
+
if self.function_name is not None:
|
10604
|
+
result['functionName'] = self.function_name
|
10605
|
+
if self.limit is not None:
|
10606
|
+
result['limit'] = self.limit
|
10607
|
+
if self.next_token is not None:
|
10608
|
+
result['nextToken'] = self.next_token
|
10609
|
+
return result
|
10610
|
+
|
10611
|
+
def from_map(self, m: dict = None):
|
10612
|
+
m = m or dict()
|
10613
|
+
if m.get('functionName') is not None:
|
10614
|
+
self.function_name = m.get('functionName')
|
10615
|
+
if m.get('limit') is not None:
|
10616
|
+
self.limit = m.get('limit')
|
10617
|
+
if m.get('nextToken') is not None:
|
10618
|
+
self.next_token = m.get('nextToken')
|
10619
|
+
return self
|
10620
|
+
|
10621
|
+
|
10622
|
+
class ListScalingConfigsResponse(TeaModel):
|
10623
|
+
def __init__(
|
10624
|
+
self,
|
10625
|
+
headers: Dict[str, str] = None,
|
10626
|
+
status_code: int = None,
|
10627
|
+
body: ListScalingConfigsOutput = None,
|
10628
|
+
):
|
10629
|
+
self.headers = headers
|
10630
|
+
self.status_code = status_code
|
10631
|
+
self.body = body
|
10632
|
+
|
10633
|
+
def validate(self):
|
10634
|
+
if self.body:
|
10635
|
+
self.body.validate()
|
10636
|
+
|
10637
|
+
def to_map(self):
|
10638
|
+
_map = super().to_map()
|
10639
|
+
if _map is not None:
|
10640
|
+
return _map
|
10641
|
+
|
10642
|
+
result = dict()
|
10643
|
+
if self.headers is not None:
|
10644
|
+
result['headers'] = self.headers
|
10645
|
+
if self.status_code is not None:
|
10646
|
+
result['statusCode'] = self.status_code
|
10647
|
+
if self.body is not None:
|
10648
|
+
result['body'] = self.body.to_map()
|
10649
|
+
return result
|
10650
|
+
|
10651
|
+
def from_map(self, m: dict = None):
|
10652
|
+
m = m or dict()
|
10653
|
+
if m.get('headers') is not None:
|
10654
|
+
self.headers = m.get('headers')
|
10655
|
+
if m.get('statusCode') is not None:
|
10656
|
+
self.status_code = m.get('statusCode')
|
10657
|
+
if m.get('body') is not None:
|
10658
|
+
temp_model = ListScalingConfigsOutput()
|
10659
|
+
self.body = temp_model.from_map(m['body'])
|
10660
|
+
return self
|
10661
|
+
|
10662
|
+
|
9230
10663
|
class ListTagResourcesRequestTag(TeaModel):
|
9231
10664
|
def __init__(
|
9232
10665
|
self,
|
@@ -9933,6 +11366,82 @@ class PutProvisionConfigResponse(TeaModel):
|
|
9933
11366
|
return self
|
9934
11367
|
|
9935
11368
|
|
11369
|
+
class PutScalingConfigRequest(TeaModel):
|
11370
|
+
def __init__(
|
11371
|
+
self,
|
11372
|
+
body: PutScalingConfigInput = None,
|
11373
|
+
qualifier: str = None,
|
11374
|
+
):
|
11375
|
+
self.body = body
|
11376
|
+
self.qualifier = qualifier
|
11377
|
+
|
11378
|
+
def validate(self):
|
11379
|
+
if self.body:
|
11380
|
+
self.body.validate()
|
11381
|
+
|
11382
|
+
def to_map(self):
|
11383
|
+
_map = super().to_map()
|
11384
|
+
if _map is not None:
|
11385
|
+
return _map
|
11386
|
+
|
11387
|
+
result = dict()
|
11388
|
+
if self.body is not None:
|
11389
|
+
result['body'] = self.body.to_map()
|
11390
|
+
if self.qualifier is not None:
|
11391
|
+
result['qualifier'] = self.qualifier
|
11392
|
+
return result
|
11393
|
+
|
11394
|
+
def from_map(self, m: dict = None):
|
11395
|
+
m = m or dict()
|
11396
|
+
if m.get('body') is not None:
|
11397
|
+
temp_model = PutScalingConfigInput()
|
11398
|
+
self.body = temp_model.from_map(m['body'])
|
11399
|
+
if m.get('qualifier') is not None:
|
11400
|
+
self.qualifier = m.get('qualifier')
|
11401
|
+
return self
|
11402
|
+
|
11403
|
+
|
11404
|
+
class PutScalingConfigResponse(TeaModel):
|
11405
|
+
def __init__(
|
11406
|
+
self,
|
11407
|
+
headers: Dict[str, str] = None,
|
11408
|
+
status_code: int = None,
|
11409
|
+
body: ScalingConfigStatus = None,
|
11410
|
+
):
|
11411
|
+
self.headers = headers
|
11412
|
+
self.status_code = status_code
|
11413
|
+
self.body = body
|
11414
|
+
|
11415
|
+
def validate(self):
|
11416
|
+
if self.body:
|
11417
|
+
self.body.validate()
|
11418
|
+
|
11419
|
+
def to_map(self):
|
11420
|
+
_map = super().to_map()
|
11421
|
+
if _map is not None:
|
11422
|
+
return _map
|
11423
|
+
|
11424
|
+
result = dict()
|
11425
|
+
if self.headers is not None:
|
11426
|
+
result['headers'] = self.headers
|
11427
|
+
if self.status_code is not None:
|
11428
|
+
result['statusCode'] = self.status_code
|
11429
|
+
if self.body is not None:
|
11430
|
+
result['body'] = self.body.to_map()
|
11431
|
+
return result
|
11432
|
+
|
11433
|
+
def from_map(self, m: dict = None):
|
11434
|
+
m = m or dict()
|
11435
|
+
if m.get('headers') is not None:
|
11436
|
+
self.headers = m.get('headers')
|
11437
|
+
if m.get('statusCode') is not None:
|
11438
|
+
self.status_code = m.get('statusCode')
|
11439
|
+
if m.get('body') is not None:
|
11440
|
+
temp_model = ScalingConfigStatus()
|
11441
|
+
self.body = temp_model.from_map(m['body'])
|
11442
|
+
return self
|
11443
|
+
|
11444
|
+
|
9936
11445
|
class StopAsyncTaskRequest(TeaModel):
|
9937
11446
|
def __init__(
|
9938
11447
|
self,
|