alibabacloud-sls20201230 5.6.0__py3-none-any.whl → 5.7.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibabacloud_sls20201230/__init__.py +1 -1
- alibabacloud_sls20201230/client.py +2285 -246
- alibabacloud_sls20201230/models.py +1917 -171
- {alibabacloud_sls20201230-5.6.0.dist-info → alibabacloud_sls20201230-5.7.1.dist-info}/METADATA +2 -2
- alibabacloud_sls20201230-5.7.1.dist-info/RECORD +8 -0
- alibabacloud_sls20201230-5.6.0.dist-info/RECORD +0 -8
- {alibabacloud_sls20201230-5.6.0.dist-info → alibabacloud_sls20201230-5.7.1.dist-info}/LICENSE +0 -0
- {alibabacloud_sls20201230-5.6.0.dist-info → alibabacloud_sls20201230-5.7.1.dist-info}/WHEEL +0 -0
- {alibabacloud_sls20201230-5.6.0.dist-info → alibabacloud_sls20201230-5.7.1.dist-info}/top_level.txt +0 -0
@@ -973,6 +973,176 @@ class ConsumerGroup(TeaModel):
         return self
 
 
+class CopilotActionParameters(TeaModel):
+    def __init__(
+        self,
+        name: str = None,
+        prompt: str = None,
+        required: str = None,
+        type: str = None,
+    ):
+        self.name = name
+        self.prompt = prompt
+        self.required = required
+        self.type = type
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.name is not None:
+            result['name'] = self.name
+        if self.prompt is not None:
+            result['prompt'] = self.prompt
+        if self.required is not None:
+            result['required'] = self.required
+        if self.type is not None:
+            result['type'] = self.type
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('prompt') is not None:
+            self.prompt = m.get('prompt')
+        if m.get('required') is not None:
+            self.required = m.get('required')
+        if m.get('type') is not None:
+            self.type = m.get('type')
+        return self
+
+
+class CopilotActionQueryTemplateParameters(TeaModel):
+    def __init__(
+        self,
+        name: str = None,
+        prompt: str = None,
+        required: str = None,
+        type: str = None,
+    ):
+        self.name = name
+        self.prompt = prompt
+        self.required = required
+        self.type = type
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.name is not None:
+            result['name'] = self.name
+        if self.prompt is not None:
+            result['prompt'] = self.prompt
+        if self.required is not None:
+            result['required'] = self.required
+        if self.type is not None:
+            result['type'] = self.type
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('prompt') is not None:
+            self.prompt = m.get('prompt')
+        if m.get('required') is not None:
+            self.required = m.get('required')
+        if m.get('type') is not None:
+            self.type = m.get('type')
+        return self
+
+
+class CopilotAction(TeaModel):
+    def __init__(
+        self,
+        action: str = None,
+        description: str = None,
+        name: str = None,
+        parameters: List[CopilotActionParameters] = None,
+        query_template: str = None,
+        query_template_parameters: List[CopilotActionQueryTemplateParameters] = None,
+        scene: str = None,
+    ):
+        self.action = action
+        self.description = description
+        self.name = name
+        self.parameters = parameters
+        self.query_template = query_template
+        self.query_template_parameters = query_template_parameters
+        self.scene = scene
+
+    def validate(self):
+        if self.parameters:
+            for k in self.parameters:
+                if k:
+                    k.validate()
+        if self.query_template_parameters:
+            for k in self.query_template_parameters:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.action is not None:
+            result['action'] = self.action
+        if self.description is not None:
+            result['description'] = self.description
+        if self.name is not None:
+            result['name'] = self.name
+        result['parameters'] = []
+        if self.parameters is not None:
+            for k in self.parameters:
+                result['parameters'].append(k.to_map() if k else None)
+        if self.query_template is not None:
+            result['queryTemplate'] = self.query_template
+        result['queryTemplateParameters'] = []
+        if self.query_template_parameters is not None:
+            for k in self.query_template_parameters:
+                result['queryTemplateParameters'].append(k.to_map() if k else None)
+        if self.scene is not None:
+            result['scene'] = self.scene
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('action') is not None:
+            self.action = m.get('action')
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        self.parameters = []
+        if m.get('parameters') is not None:
+            for k in m.get('parameters'):
+                temp_model = CopilotActionParameters()
+                self.parameters.append(temp_model.from_map(k))
+        if m.get('queryTemplate') is not None:
+            self.query_template = m.get('queryTemplate')
+        self.query_template_parameters = []
+        if m.get('queryTemplateParameters') is not None:
+            for k in m.get('queryTemplateParameters'):
+                temp_model = CopilotActionQueryTemplateParameters()
+                self.query_template_parameters.append(temp_model.from_map(k))
+        if m.get('scene') is not None:
+            self.scene = m.get('scene')
+        return self
+
+
 class ETLConfigurationSink(TeaModel):
     def __init__(
         self,
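For readers unfamiliar with these generated Tea models, here is a minimal usage sketch of the new Copilot classes. It is based only on the definitions shown above; the field values are made up for illustration.

    from alibabacloud_sls20201230.models import CopilotAction, CopilotActionParameters

    # Build an action with one nested parameter (illustrative values).
    action = CopilotAction(
        name='query_slow_logs',
        description='Query slow logs of a Logstore',
        parameters=[CopilotActionParameters(name='project', prompt='project name', required='true', type='string')],
    )
    action.validate()                         # validates the nested parameter models
    payload = action.to_map()                 # plain dict; nested models are serialized too
    restored = CopilotAction().from_map(payload)

Note that to_map() always emits the parameters and queryTemplateParameters keys as lists, even when the corresponding attributes are unset.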
@@ -2544,7 +2714,9 @@ class MLServiceParam(TeaModel):
 class MaxComputeExportConfigurationSink(TeaModel):
     def __init__(
         self,
+        buffer_interval: str = None,
         fields: List[str] = None,
+        filter_invalid: bool = None,
         odps_access_key_id: str = None,
         odps_access_secret: str = None,
         odps_endpoint: str = None,
@@ -2554,10 +2726,13 @@ class MaxComputeExportConfigurationSink(TeaModel):
         odps_tunnel_endpoint: str = None,
         partition_column: List[str] = None,
         partition_time_format: str = None,
+        time_format_type: str = None,
         time_zone: str = None,
     ):
+        self.buffer_interval = buffer_interval
         # This parameter is required.
         self.fields = fields
+        self.filter_invalid = filter_invalid
         self.odps_access_key_id = odps_access_key_id
         self.odps_access_secret = odps_access_secret
         # This parameter is required.
@@ -2568,12 +2743,12 @@ class MaxComputeExportConfigurationSink(TeaModel):
         self.odps_rolearn = odps_rolearn
         # This parameter is required.
         self.odps_table = odps_table
-        # This parameter is required.
         self.odps_tunnel_endpoint = odps_tunnel_endpoint
         # This parameter is required.
         self.partition_column = partition_column
         # This parameter is required.
         self.partition_time_format = partition_time_format
+        self.time_format_type = time_format_type
         # This parameter is required.
         self.time_zone = time_zone
 
@@ -2586,8 +2761,12 @@ class MaxComputeExportConfigurationSink(TeaModel):
             return _map
 
         result = dict()
+        if self.buffer_interval is not None:
+            result['bufferInterval'] = self.buffer_interval
         if self.fields is not None:
             result['fields'] = self.fields
+        if self.filter_invalid is not None:
+            result['filterInvalid'] = self.filter_invalid
         if self.odps_access_key_id is not None:
             result['odpsAccessKeyId'] = self.odps_access_key_id
         if self.odps_access_secret is not None:
@@ -2606,14 +2785,20 @@ class MaxComputeExportConfigurationSink(TeaModel):
             result['partitionColumn'] = self.partition_column
         if self.partition_time_format is not None:
             result['partitionTimeFormat'] = self.partition_time_format
+        if self.time_format_type is not None:
+            result['timeFormatType'] = self.time_format_type
         if self.time_zone is not None:
             result['timeZone'] = self.time_zone
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('bufferInterval') is not None:
+            self.buffer_interval = m.get('bufferInterval')
         if m.get('fields') is not None:
             self.fields = m.get('fields')
+        if m.get('filterInvalid') is not None:
+            self.filter_invalid = m.get('filterInvalid')
         if m.get('odpsAccessKeyId') is not None:
             self.odps_access_key_id = m.get('odpsAccessKeyId')
         if m.get('odpsAccessSecret') is not None:
@@ -2632,6 +2817,8 @@ class MaxComputeExportConfigurationSink(TeaModel):
             self.partition_column = m.get('partitionColumn')
         if m.get('partitionTimeFormat') is not None:
             self.partition_time_format = m.get('partitionTimeFormat')
+        if m.get('timeFormatType') is not None:
+            self.time_format_type = m.get('timeFormatType')
         if m.get('timeZone') is not None:
             self.time_zone = m.get('timeZone')
         return self
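A short sketch of how the two new sink fields surface in the serialized MaxCompute shipping configuration; the values are illustrative only.

    from alibabacloud_sls20201230.models import MaxComputeExportConfigurationSink

    sink = MaxComputeExportConfigurationSink(
        fields=['__time__', 'level', 'message'],
        buffer_interval='300',    # new in 5.7.1
        filter_invalid=True,      # new in 5.7.1
        partition_time_format='%Y%m%d',
        time_zone='+0800',
    )
    m = sink.to_map()             # m['bufferInterval'] == '300', m['filterInvalid'] is True

Also note that odps_tunnel_endpoint is no longer annotated as required in this version.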
@@ -3026,6 +3213,7 @@ class OSSIngestionConfigurationSource(TeaModel):
         restore_object_enabled: bool = None,
         role_arn: str = None,
         start_time: int = None,
+        tag_pack_id: bool = None,
         time_field: str = None,
         time_format: str = None,
         time_pattern: str = None,
@@ -3050,6 +3238,7 @@ class OSSIngestionConfigurationSource(TeaModel):
         self.restore_object_enabled = restore_object_enabled
         self.role_arn = role_arn
         self.start_time = start_time
+        self.tag_pack_id = tag_pack_id
         self.time_field = time_field
         self.time_format = time_format
         self.time_pattern = time_pattern
@@ -3090,6 +3279,8 @@ class OSSIngestionConfigurationSource(TeaModel):
             result['roleARN'] = self.role_arn
         if self.start_time is not None:
             result['startTime'] = self.start_time
+        if self.tag_pack_id is not None:
+            result['tagPackId'] = self.tag_pack_id
         if self.time_field is not None:
             result['timeField'] = self.time_field
         if self.time_format is not None:
@@ -3128,6 +3319,8 @@ class OSSIngestionConfigurationSource(TeaModel):
             self.role_arn = m.get('roleARN')
         if m.get('startTime') is not None:
             self.start_time = m.get('startTime')
+        if m.get('tagPackId') is not None:
+            self.tag_pack_id = m.get('tagPackId')
         if m.get('timeField') is not None:
             self.time_field = m.get('timeField')
         if m.get('timeFormat') is not None:
@@ -3353,24 +3546,58 @@ class ProjectSummary(TeaModel):
         return self
 
 
-class
+class S3IngestionConfigurationSource(TeaModel):
     def __init__(
         self,
-
-
-
-
-
+        aws_access_key: str = None,
+        aws_access_key_secret: str = None,
+        aws_region: str = None,
+        aws_sqsqueue_url: str = None,
+        aws_use_sqs: bool = None,
+        bucket: str = None,
+        compression_codec: str = None,
+        encoding: str = None,
+        end_time: int = None,
+        format: Dict[str, Any] = None,
+        interval: str = None,
+        pattern: str = None,
+        prefix: str = None,
+        start_time: int = None,
+        tag_pack_id: bool = None,
+        time_field: str = None,
+        time_format: str = None,
+        time_pattern: str = None,
+        time_zone: str = None,
+        use_aws_sqsonly: bool = None,
     ):
         # This parameter is required.
-        self.
+        self.aws_access_key = aws_access_key
         # This parameter is required.
-        self.
+        self.aws_access_key_secret = aws_access_key_secret
         # This parameter is required.
-        self.
+        self.aws_region = aws_region
+        self.aws_sqsqueue_url = aws_sqsqueue_url
+        self.aws_use_sqs = aws_use_sqs
         # This parameter is required.
-        self.
-
+        self.bucket = bucket
+        # This parameter is required.
+        self.compression_codec = compression_codec
+        # This parameter is required.
+        self.encoding = encoding
+        self.end_time = end_time
+        # This parameter is required.
+        self.format = format
+        # This parameter is required.
+        self.interval = interval
+        self.pattern = pattern
+        self.prefix = prefix
+        self.start_time = start_time
+        self.tag_pack_id = tag_pack_id
+        self.time_field = time_field
+        self.time_format = time_format
+        self.time_pattern = time_pattern
+        self.time_zone = time_zone
+        self.use_aws_sqsonly = use_aws_sqsonly
 
     def validate(self):
         pass
@@ -3381,58 +3608,292 @@ class SavedSearch(TeaModel):
|
|
3381
3608
|
return _map
|
3382
3609
|
|
3383
3610
|
result = dict()
|
3384
|
-
if self.
|
3385
|
-
result['
|
3386
|
-
if self.
|
3387
|
-
result['
|
3388
|
-
if self.
|
3389
|
-
result['
|
3390
|
-
if self.
|
3391
|
-
result['
|
3392
|
-
if self.
|
3393
|
-
result['
|
3611
|
+
if self.aws_access_key is not None:
|
3612
|
+
result['awsAccessKey'] = self.aws_access_key
|
3613
|
+
if self.aws_access_key_secret is not None:
|
3614
|
+
result['awsAccessKeySecret'] = self.aws_access_key_secret
|
3615
|
+
if self.aws_region is not None:
|
3616
|
+
result['awsRegion'] = self.aws_region
|
3617
|
+
if self.aws_sqsqueue_url is not None:
|
3618
|
+
result['awsSQSQueueUrl'] = self.aws_sqsqueue_url
|
3619
|
+
if self.aws_use_sqs is not None:
|
3620
|
+
result['awsUseSQS'] = self.aws_use_sqs
|
3621
|
+
if self.bucket is not None:
|
3622
|
+
result['bucket'] = self.bucket
|
3623
|
+
if self.compression_codec is not None:
|
3624
|
+
result['compressionCodec'] = self.compression_codec
|
3625
|
+
if self.encoding is not None:
|
3626
|
+
result['encoding'] = self.encoding
|
3627
|
+
if self.end_time is not None:
|
3628
|
+
result['endTime'] = self.end_time
|
3629
|
+
if self.format is not None:
|
3630
|
+
result['format'] = self.format
|
3631
|
+
if self.interval is not None:
|
3632
|
+
result['interval'] = self.interval
|
3633
|
+
if self.pattern is not None:
|
3634
|
+
result['pattern'] = self.pattern
|
3635
|
+
if self.prefix is not None:
|
3636
|
+
result['prefix'] = self.prefix
|
3637
|
+
if self.start_time is not None:
|
3638
|
+
result['startTime'] = self.start_time
|
3639
|
+
if self.tag_pack_id is not None:
|
3640
|
+
result['tagPackId'] = self.tag_pack_id
|
3641
|
+
if self.time_field is not None:
|
3642
|
+
result['timeField'] = self.time_field
|
3643
|
+
if self.time_format is not None:
|
3644
|
+
result['timeFormat'] = self.time_format
|
3645
|
+
if self.time_pattern is not None:
|
3646
|
+
result['timePattern'] = self.time_pattern
|
3647
|
+
if self.time_zone is not None:
|
3648
|
+
result['timeZone'] = self.time_zone
|
3649
|
+
if self.use_aws_sqsonly is not None:
|
3650
|
+
result['useAwsSQSOnly'] = self.use_aws_sqsonly
|
3394
3651
|
return result
|
3395
3652
|
|
3396
3653
|
def from_map(self, m: dict = None):
|
3397
3654
|
m = m or dict()
|
3398
|
-
if m.get('
|
3399
|
-
self.
|
3400
|
-
if m.get('
|
3401
|
-
self.
|
3402
|
-
if m.get('
|
3403
|
-
self.
|
3404
|
-
if m.get('
|
3405
|
-
self.
|
3406
|
-
if m.get('
|
3407
|
-
self.
|
3655
|
+
if m.get('awsAccessKey') is not None:
|
3656
|
+
self.aws_access_key = m.get('awsAccessKey')
|
3657
|
+
if m.get('awsAccessKeySecret') is not None:
|
3658
|
+
self.aws_access_key_secret = m.get('awsAccessKeySecret')
|
3659
|
+
if m.get('awsRegion') is not None:
|
3660
|
+
self.aws_region = m.get('awsRegion')
|
3661
|
+
if m.get('awsSQSQueueUrl') is not None:
|
3662
|
+
self.aws_sqsqueue_url = m.get('awsSQSQueueUrl')
|
3663
|
+
if m.get('awsUseSQS') is not None:
|
3664
|
+
self.aws_use_sqs = m.get('awsUseSQS')
|
3665
|
+
if m.get('bucket') is not None:
|
3666
|
+
self.bucket = m.get('bucket')
|
3667
|
+
if m.get('compressionCodec') is not None:
|
3668
|
+
self.compression_codec = m.get('compressionCodec')
|
3669
|
+
if m.get('encoding') is not None:
|
3670
|
+
self.encoding = m.get('encoding')
|
3671
|
+
if m.get('endTime') is not None:
|
3672
|
+
self.end_time = m.get('endTime')
|
3673
|
+
if m.get('format') is not None:
|
3674
|
+
self.format = m.get('format')
|
3675
|
+
if m.get('interval') is not None:
|
3676
|
+
self.interval = m.get('interval')
|
3677
|
+
if m.get('pattern') is not None:
|
3678
|
+
self.pattern = m.get('pattern')
|
3679
|
+
if m.get('prefix') is not None:
|
3680
|
+
self.prefix = m.get('prefix')
|
3681
|
+
if m.get('startTime') is not None:
|
3682
|
+
self.start_time = m.get('startTime')
|
3683
|
+
if m.get('tagPackId') is not None:
|
3684
|
+
self.tag_pack_id = m.get('tagPackId')
|
3685
|
+
if m.get('timeField') is not None:
|
3686
|
+
self.time_field = m.get('timeField')
|
3687
|
+
if m.get('timeFormat') is not None:
|
3688
|
+
self.time_format = m.get('timeFormat')
|
3689
|
+
if m.get('timePattern') is not None:
|
3690
|
+
self.time_pattern = m.get('timePattern')
|
3691
|
+
if m.get('timeZone') is not None:
|
3692
|
+
self.time_zone = m.get('timeZone')
|
3693
|
+
if m.get('useAwsSQSOnly') is not None:
|
3694
|
+
self.use_aws_sqsonly = m.get('useAwsSQSOnly')
|
3408
3695
|
return self
|
3409
3696
|
|
3410
3697
|
|
3411
|
-
class
|
3698
|
+
class S3Ingestion(TeaModel):
|
3412
3699
|
def __init__(
|
3413
3700
|
self,
|
3414
|
-
|
3415
|
-
|
3416
|
-
|
3417
|
-
|
3418
|
-
|
3419
|
-
|
3420
|
-
|
3421
|
-
|
3422
|
-
|
3423
|
-
parameters: Dict[str, Any] = None,
|
3424
|
-
resource_pool: str = None,
|
3425
|
-
role_arn: str = None,
|
3426
|
-
script: str = None,
|
3427
|
-
source_logstore: str = None,
|
3428
|
-
sql_type: str = None,
|
3429
|
-
to_time: int = None,
|
3430
|
-
to_time_expr: str = None,
|
3701
|
+
configuration: S3IngestionConfigurationSource = None,
|
3702
|
+
create_time: int = None,
|
3703
|
+
description: str = None,
|
3704
|
+
display_name: str = None,
|
3705
|
+
last_modified_time: int = None,
|
3706
|
+
name: str = None,
|
3707
|
+
schedule: Schedule = None,
|
3708
|
+
schedule_id: str = None,
|
3709
|
+
status: str = None,
|
3431
3710
|
):
|
3432
3711
|
# This parameter is required.
|
3433
|
-
self.
|
3434
|
-
|
3435
|
-
self.
|
3712
|
+
self.configuration = configuration
|
3713
|
+
self.create_time = create_time
|
3714
|
+
self.description = description
|
3715
|
+
# This parameter is required.
|
3716
|
+
self.display_name = display_name
|
3717
|
+
self.last_modified_time = last_modified_time
|
3718
|
+
# This parameter is required.
|
3719
|
+
self.name = name
|
3720
|
+
# This parameter is required.
|
3721
|
+
self.schedule = schedule
|
3722
|
+
self.schedule_id = schedule_id
|
3723
|
+
self.status = status
|
3724
|
+
|
3725
|
+
def validate(self):
|
3726
|
+
if self.configuration:
|
3727
|
+
self.configuration.validate()
|
3728
|
+
if self.schedule:
|
3729
|
+
self.schedule.validate()
|
3730
|
+
|
3731
|
+
def to_map(self):
|
3732
|
+
_map = super().to_map()
|
3733
|
+
if _map is not None:
|
3734
|
+
return _map
|
3735
|
+
|
3736
|
+
result = dict()
|
3737
|
+
if self.configuration is not None:
|
3738
|
+
result['configuration'] = self.configuration.to_map()
|
3739
|
+
if self.create_time is not None:
|
3740
|
+
result['createTime'] = self.create_time
|
3741
|
+
if self.description is not None:
|
3742
|
+
result['description'] = self.description
|
3743
|
+
if self.display_name is not None:
|
3744
|
+
result['displayName'] = self.display_name
|
3745
|
+
if self.last_modified_time is not None:
|
3746
|
+
result['lastModifiedTime'] = self.last_modified_time
|
3747
|
+
if self.name is not None:
|
3748
|
+
result['name'] = self.name
|
3749
|
+
if self.schedule is not None:
|
3750
|
+
result['schedule'] = self.schedule.to_map()
|
3751
|
+
if self.schedule_id is not None:
|
3752
|
+
result['scheduleId'] = self.schedule_id
|
3753
|
+
if self.status is not None:
|
3754
|
+
result['status'] = self.status
|
3755
|
+
return result
|
3756
|
+
|
3757
|
+
def from_map(self, m: dict = None):
|
3758
|
+
m = m or dict()
|
3759
|
+
if m.get('configuration') is not None:
|
3760
|
+
temp_model = S3IngestionConfigurationSource()
|
3761
|
+
self.configuration = temp_model.from_map(m['configuration'])
|
3762
|
+
if m.get('createTime') is not None:
|
3763
|
+
self.create_time = m.get('createTime')
|
3764
|
+
if m.get('description') is not None:
|
3765
|
+
self.description = m.get('description')
|
3766
|
+
if m.get('displayName') is not None:
|
3767
|
+
self.display_name = m.get('displayName')
|
3768
|
+
if m.get('lastModifiedTime') is not None:
|
3769
|
+
self.last_modified_time = m.get('lastModifiedTime')
|
3770
|
+
if m.get('name') is not None:
|
3771
|
+
self.name = m.get('name')
|
3772
|
+
if m.get('schedule') is not None:
|
3773
|
+
temp_model = Schedule()
|
3774
|
+
self.schedule = temp_model.from_map(m['schedule'])
|
3775
|
+
if m.get('scheduleId') is not None:
|
3776
|
+
self.schedule_id = m.get('scheduleId')
|
3777
|
+
if m.get('status') is not None:
|
3778
|
+
self.status = m.get('status')
|
3779
|
+
return self
|
3780
|
+
|
3781
|
+
|
3782
|
+
class S3IngestionConfiguration(TeaModel):
|
3783
|
+
def __init__(
|
3784
|
+
self,
|
3785
|
+
logstore: str = None,
|
3786
|
+
source: S3IngestionConfigurationSource = None,
|
3787
|
+
):
|
3788
|
+
self.logstore = logstore
|
3789
|
+
self.source = source
|
3790
|
+
|
3791
|
+
def validate(self):
|
3792
|
+
if self.source:
|
3793
|
+
self.source.validate()
|
3794
|
+
|
3795
|
+
def to_map(self):
|
3796
|
+
_map = super().to_map()
|
3797
|
+
if _map is not None:
|
3798
|
+
return _map
|
3799
|
+
|
3800
|
+
result = dict()
|
3801
|
+
if self.logstore is not None:
|
3802
|
+
result['logstore'] = self.logstore
|
3803
|
+
if self.source is not None:
|
3804
|
+
result['source'] = self.source.to_map()
|
3805
|
+
return result
|
3806
|
+
|
3807
|
+
def from_map(self, m: dict = None):
|
3808
|
+
m = m or dict()
|
3809
|
+
if m.get('logstore') is not None:
|
3810
|
+
self.logstore = m.get('logstore')
|
3811
|
+
if m.get('source') is not None:
|
3812
|
+
temp_model = S3IngestionConfigurationSource()
|
3813
|
+
self.source = temp_model.from_map(m['source'])
|
3814
|
+
return self
|
3815
|
+
|
3816
|
+
|
3817
|
+
class SavedSearch(TeaModel):
|
3818
|
+
def __init__(
|
3819
|
+
self,
|
3820
|
+
display_name: str = None,
|
3821
|
+
logstore: str = None,
|
3822
|
+
savedsearch_name: str = None,
|
3823
|
+
search_query: str = None,
|
3824
|
+
topic: str = None,
|
3825
|
+
):
|
3826
|
+
# This parameter is required.
|
3827
|
+
self.display_name = display_name
|
3828
|
+
# This parameter is required.
|
3829
|
+
self.logstore = logstore
|
3830
|
+
# This parameter is required.
|
3831
|
+
self.savedsearch_name = savedsearch_name
|
3832
|
+
# This parameter is required.
|
3833
|
+
self.search_query = search_query
|
3834
|
+
self.topic = topic
|
3835
|
+
|
3836
|
+
def validate(self):
|
3837
|
+
pass
|
3838
|
+
|
3839
|
+
def to_map(self):
|
3840
|
+
_map = super().to_map()
|
3841
|
+
if _map is not None:
|
3842
|
+
return _map
|
3843
|
+
|
3844
|
+
result = dict()
|
3845
|
+
if self.display_name is not None:
|
3846
|
+
result['displayName'] = self.display_name
|
3847
|
+
if self.logstore is not None:
|
3848
|
+
result['logstore'] = self.logstore
|
3849
|
+
if self.savedsearch_name is not None:
|
3850
|
+
result['savedsearchName'] = self.savedsearch_name
|
3851
|
+
if self.search_query is not None:
|
3852
|
+
result['searchQuery'] = self.search_query
|
3853
|
+
if self.topic is not None:
|
3854
|
+
result['topic'] = self.topic
|
3855
|
+
return result
|
3856
|
+
|
3857
|
+
def from_map(self, m: dict = None):
|
3858
|
+
m = m or dict()
|
3859
|
+
if m.get('displayName') is not None:
|
3860
|
+
self.display_name = m.get('displayName')
|
3861
|
+
if m.get('logstore') is not None:
|
3862
|
+
self.logstore = m.get('logstore')
|
3863
|
+
if m.get('savedsearchName') is not None:
|
3864
|
+
self.savedsearch_name = m.get('savedsearchName')
|
3865
|
+
if m.get('searchQuery') is not None:
|
3866
|
+
self.search_query = m.get('searchQuery')
|
3867
|
+
if m.get('topic') is not None:
|
3868
|
+
self.topic = m.get('topic')
|
3869
|
+
return self
|
3870
|
+
|
3871
|
+
|
3872
|
+
class ScheduledSQLConfiguration(TeaModel):
|
3873
|
+
def __init__(
|
3874
|
+
self,
|
3875
|
+
data_format: str = None,
|
3876
|
+
dest_endpoint: str = None,
|
3877
|
+
dest_logstore: str = None,
|
3878
|
+
dest_project: str = None,
|
3879
|
+
dest_role_arn: str = None,
|
3880
|
+
from_time: int = None,
|
3881
|
+
from_time_expr: str = None,
|
3882
|
+
max_retries: int = None,
|
3883
|
+
max_run_time_in_seconds: int = None,
|
3884
|
+
parameters: Dict[str, Any] = None,
|
3885
|
+
resource_pool: str = None,
|
3886
|
+
role_arn: str = None,
|
3887
|
+
script: str = None,
|
3888
|
+
source_logstore: str = None,
|
3889
|
+
sql_type: str = None,
|
3890
|
+
to_time: int = None,
|
3891
|
+
to_time_expr: str = None,
|
3892
|
+
):
|
3893
|
+
# This parameter is required.
|
3894
|
+
self.data_format = data_format
|
3895
|
+
# This parameter is required.
|
3896
|
+
self.dest_endpoint = dest_endpoint
|
3436
3897
|
# This parameter is required.
|
3437
3898
|
self.dest_logstore = dest_logstore
|
3438
3899
|
# This parameter is required.
|
@@ -4393,9 +4854,11 @@ class Project(TeaModel):
         data_redundancy_type: str = None,
         description: str = None,
         last_modify_time: str = None,
+        location: str = None,
         owner: str = None,
         project_name: str = None,
         quota: Dict[str, Any] = None,
+        recycle_bin_enabled: bool = None,
         region: str = None,
         resource_group_id: str = None,
         status: str = None,
@@ -4405,10 +4868,12 @@ class Project(TeaModel):
         # This parameter is required.
         self.description = description
         self.last_modify_time = last_modify_time
+        self.location = location
         self.owner = owner
         # This parameter is required.
         self.project_name = project_name
         self.quota = quota
+        self.recycle_bin_enabled = recycle_bin_enabled
         self.region = region
         self.resource_group_id = resource_group_id
         self.status = status
@@ -4430,12 +4895,16 @@ class Project(TeaModel):
             result['description'] = self.description
         if self.last_modify_time is not None:
             result['lastModifyTime'] = self.last_modify_time
+        if self.location is not None:
+            result['location'] = self.location
         if self.owner is not None:
             result['owner'] = self.owner
         if self.project_name is not None:
             result['projectName'] = self.project_name
         if self.quota is not None:
             result['quota'] = self.quota
+        if self.recycle_bin_enabled is not None:
+            result['recycleBinEnabled'] = self.recycle_bin_enabled
         if self.region is not None:
             result['region'] = self.region
         if self.resource_group_id is not None:
@@ -4454,12 +4923,16 @@ class Project(TeaModel):
             self.description = m.get('description')
         if m.get('lastModifyTime') is not None:
             self.last_modify_time = m.get('lastModifyTime')
+        if m.get('location') is not None:
+            self.location = m.get('location')
         if m.get('owner') is not None:
             self.owner = m.get('owner')
         if m.get('projectName') is not None:
             self.project_name = m.get('projectName')
         if m.get('quota') is not None:
             self.quota = m.get('quota')
+        if m.get('recycleBinEnabled') is not None:
+            self.recycle_bin_enabled = m.get('recycleBinEnabled')
         if m.get('region') is not None:
             self.region = m.get('region')
         if m.get('resourceGroupId') is not None:
@@ -4586,6 +5059,85 @@ class ApplyConfigToMachineGroupResponse(TeaModel):
         return self
 
 
+class CallAiToolsRequest(TeaModel):
+    def __init__(
+        self,
+        params: Dict[str, str] = None,
+        region_id: str = None,
+        tool_name: str = None,
+    ):
+        self.params = params
+        self.region_id = region_id
+        # This parameter is required.
+        self.tool_name = tool_name
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.params is not None:
+            result['params'] = self.params
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        if self.tool_name is not None:
+            result['toolName'] = self.tool_name
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('params') is not None:
+            self.params = m.get('params')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        if m.get('toolName') is not None:
+            self.tool_name = m.get('toolName')
+        return self
+
+
+class CallAiToolsResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: str = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            self.body = m.get('body')
+        return self
+
+
 class ChangeResourceGroupRequest(TeaModel):
     def __init__(
         self,
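The CallAiToolsRequest/CallAiToolsResponse pair suggests a corresponding CallAiTools operation was added to client.py (not shown in this excerpt), so only the model side is sketched here; the tool name and parameters are placeholders, not documented values.

    from alibabacloud_sls20201230.models import CallAiToolsRequest

    request = CallAiToolsRequest(
        tool_name='text_to_sql',        # required; placeholder tool name
        region_id='cn-hangzhou',
        params={'project': 'my-project', 'question': 'error count per minute'},
    )
    body = request.to_map()             # {'toolName': ..., 'regionId': ..., 'params': {...}}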
@@ -5831,10 +6383,10 @@ class CreateLogStoreRequest(TeaModel):
         telemetry_type: str = None,
         ttl: int = None,
     ):
-        # Specifies whether to record the **public IP address** and **log receiving time**. Default value: false. Valid values:
+        # Specifies whether to record the **public IP address** and the **log receiving time**. Default value: false. Valid values:
         #
-        # * true
-        # * false
+        # * true: records the public IP address and the log receiving time. If you set this parameter to true, Simple Log Service automatically adds the public IP address of the device from which the log is collected and the time when Simple Log Service receives the log to the Tag field of the collected log.
+        # * false: does not record the public IP address or log receiving time.
         self.append_meta = append_meta
         # Specifies whether to enable automatic sharding. Valid values:
         #
@@ -5848,17 +6400,17 @@ class CreateLogStoreRequest(TeaModel):
         self.enable_tracking = enable_tracking
         # The data structure of the encryption configuration. The following parameters are included: `enable`, `encrypt_type`, and `user_cmk_info`. For more information, see [EncryptConf](https://help.aliyun.com/document_detail/409461.html).
         self.encrypt_conf = encrypt_conf
-        # The retention period
+        # The data retention period for the hot storage tier. Unit: days. Minimum value: 7. The value of this parameter cannot exceed the value of ttl. If you set this parameter to -1, all data is stored in the hot storage tier.
         #
         # After the retention period that is specified for the hot storage tier elapses, the data is moved to the Infrequent Access (IA) storage tier. For more information, see [Enable hot and cold-tiered storage for a Logstore](https://help.aliyun.com/document_detail/308645.html).
         self.hot_ttl = hot_ttl
-        # The retention period
+        # The data retention period for the IA storage tier. You must set this parameter to at least 30 days. After the data retention period that you specify for the IA storage tier elapses, the data is moved to the Archive storage tier.
         self.infrequent_access_ttl = infrequent_access_ttl
         # The name of the Logstore. The name must meet the following requirements:
         #
         # * The name must be unique in a project.
         # * The name can contain only lowercase letters, digits, hyphens (-), and underscores (_).
-        # * The name must start and end with a lowercase letter or
+        # * The name must start and end with a lowercase letter or digit.
         # * The name must be 3 to 63 characters in length.
         #
         # This parameter is required.
@@ -5869,9 +6421,10 @@ class CreateLogStoreRequest(TeaModel):
         self.max_split_shard = max_split_shard
         # The type of the Logstore. Simple Log Service provides two types of Logstores: Standard Logstores and Query Logstores. Valid values:
         #
-        # * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can
-        # * **query**: Query Logstore. This type of Logstore supports high-performance
+        # * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can use this type of Logstore to build a comprehensive observability system.
+        # * **query**: Query Logstore. This type of Logstore supports high-performance query operations. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the amount of data is large, the data retention period is long, or log analysis is not required. Data retention periods of weeks or months are considered long.
         self.mode = mode
+        # IngestProcessor ID
         self.processor_id = processor_id
         # The number of shards.
         #
@@ -5884,7 +6437,7 @@ class CreateLogStoreRequest(TeaModel):
         # * **None** (default): log data
         # * **Metrics**: metric data
         self.telemetry_type = telemetry_type
-        # The retention period
+        # The data retention period. Unit: days. Valid values: 1 to 3650. If you set this parameter to 3650, data is permanently stored.
         #
         # This parameter is required.
         self.ttl = ttl
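Putting the documented retention rules together, a request that keeps data for 90 days overall, with 7 days in the hot tier and 30 days in the IA tier, could be assembled roughly as follows; logstore_name and shard_count are assumed field names that are not visible in this excerpt.

    from alibabacloud_sls20201230.models import CreateLogStoreRequest

    request = CreateLogStoreRequest(
        logstore_name='nginx-access',   # assumed field name; 3-63 chars, lowercase letters, digits, - and _
        ttl=90,                         # total retention in days (1-3650; 3650 means permanent storage)
        hot_ttl=7,                      # at least 7 and no more than ttl; -1 keeps all data in the hot tier
        infrequent_access_ttl=30,       # at least 30 days before data moves to the Archive tier
        shard_count=2,                  # assumed field name
        mode='standard',
    )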
@@ -6132,24 +6685,51 @@ class CreateLogtailPipelineConfigRequest(TeaModel):
         processors: List[Dict[str, Any]] = None,
     ):
         # The aggregation plug-ins.
+        #
+        # > This parameter takes effect only when extended plug-ins are used. You can use only one aggregation plug-in.
         self.aggregators = aggregators
         # The name of the configuration.
         #
-        #
-        self.config_name = config_name
-        # The data output plug-ins.
+        # > The name of the configuration must be unique in the project to which the configuration belongs. After the configuration is created, you cannot change the name of the configuration. The name must meet the following requirements:
         #
-        #
-
-        # The
+        # * The name can contain only lowercase letters, digits, hyphens (-), and underscores (_).
+        #
+        # * The name must start and end with a lowercase letter or a digit.
+        #
+        # * The name must be 2 to 128 characters in length.
+        #
+        # This parameter is required.
+        self.config_name = config_name
+        # The output plug-ins.
+        #
+        # > You can configure only one output plug-in.
+        #
+        # This parameter is required.
+        self.flushers = flushers
+        # The global settings.
         self.global_ = global_
-        # The
+        # The input plug-ins.
+        #
+        # > You can configure only one input plug-in.
         #
         # This parameter is required.
         self.inputs = inputs
-        # The sample log.
+        # The sample log. You can specify multiple sample logs.
         self.log_sample = log_sample
         # The processing plug-ins.
+        #
+        # > Logtail plug-ins for data processing are classified into native plug-ins and extended plug-ins. For more information, see [Overview of Logtail plug-ins for data processing](https://help.aliyun.com/document_detail/64957.html).
+        #
+        # >
+        #
+        # * You can use native plug-ins only to collect text logs.
+        #
+        # * You cannot add native plug-ins and extended plug-ins at a time.
+        #
+        # * When you add native plug-ins, take note of the following items:
+        #
+        # * You must add one of the following Logtail plug-ins for data processing as the first plug-in: Data Parsing (Regex Mode), Data Parsing (Delimiter Mode), Data Parsing (JSON Mode), Data Parsing (NGINX Mode), Data Parsing (Apache Mode), and Data Parsing (IIS Mode).
+        # * After you add the first plug-in, you can add one Time Parsing plug-in, one Data Filtering plug-in, and multiple Data Masking plug-ins.
         self.processors = processors
 
     def validate(self):
@@ -6372,10 +6952,102 @@ class CreateMachineGroupResponse(TeaModel):
|
|
6372
6952
|
return self
|
6373
6953
|
|
6374
6954
|
|
6955
|
+
class CreateMaxComputeExportRequest(TeaModel):
|
6956
|
+
def __init__(
|
6957
|
+
self,
|
6958
|
+
configuration: MaxComputeExportConfiguration = None,
|
6959
|
+
description: str = None,
|
6960
|
+
display_name: str = None,
|
6961
|
+
name: str = None,
|
6962
|
+
):
|
6963
|
+
# The setting of the MaxCompute data shipping job.
|
6964
|
+
#
|
6965
|
+
# This parameter is required.
|
6966
|
+
self.configuration = configuration
|
6967
|
+
# The description of the MaxCompute data shipping job.
|
6968
|
+
self.description = description
|
6969
|
+
# The display name of the MaxCompute data shipping job.
|
6970
|
+
#
|
6971
|
+
# This parameter is required.
|
6972
|
+
self.display_name = display_name
|
6973
|
+
# The unique identifier of the MaxCompute data shipping job.
|
6974
|
+
#
|
6975
|
+
# This parameter is required.
|
6976
|
+
self.name = name
|
6977
|
+
|
6978
|
+
def validate(self):
|
6979
|
+
if self.configuration:
|
6980
|
+
self.configuration.validate()
|
6981
|
+
|
6982
|
+
def to_map(self):
|
6983
|
+
_map = super().to_map()
|
6984
|
+
if _map is not None:
|
6985
|
+
return _map
|
6986
|
+
|
6987
|
+
result = dict()
|
6988
|
+
if self.configuration is not None:
|
6989
|
+
result['configuration'] = self.configuration.to_map()
|
6990
|
+
if self.description is not None:
|
6991
|
+
result['description'] = self.description
|
6992
|
+
if self.display_name is not None:
|
6993
|
+
result['displayName'] = self.display_name
|
6994
|
+
if self.name is not None:
|
6995
|
+
result['name'] = self.name
|
6996
|
+
return result
|
6997
|
+
|
6998
|
+
def from_map(self, m: dict = None):
|
6999
|
+
m = m or dict()
|
7000
|
+
if m.get('configuration') is not None:
|
7001
|
+
temp_model = MaxComputeExportConfiguration()
|
7002
|
+
self.configuration = temp_model.from_map(m['configuration'])
|
7003
|
+
if m.get('description') is not None:
|
7004
|
+
self.description = m.get('description')
|
7005
|
+
if m.get('displayName') is not None:
|
7006
|
+
self.display_name = m.get('displayName')
|
7007
|
+
if m.get('name') is not None:
|
7008
|
+
self.name = m.get('name')
|
7009
|
+
return self
|
7010
|
+
|
7011
|
+
|
7012
|
+
class CreateMaxComputeExportResponse(TeaModel):
|
7013
|
+
def __init__(
|
7014
|
+
self,
|
7015
|
+
headers: Dict[str, str] = None,
|
7016
|
+
status_code: int = None,
|
7017
|
+
):
|
7018
|
+
self.headers = headers
|
7019
|
+
self.status_code = status_code
|
7020
|
+
|
7021
|
+
def validate(self):
|
7022
|
+
pass
|
7023
|
+
|
7024
|
+
def to_map(self):
|
7025
|
+
_map = super().to_map()
|
7026
|
+
if _map is not None:
|
7027
|
+
return _map
|
7028
|
+
|
7029
|
+
result = dict()
|
7030
|
+
if self.headers is not None:
|
7031
|
+
result['headers'] = self.headers
|
7032
|
+
if self.status_code is not None:
|
7033
|
+
result['statusCode'] = self.status_code
|
7034
|
+
return result
|
7035
|
+
|
7036
|
+
def from_map(self, m: dict = None):
|
7037
|
+
m = m or dict()
|
7038
|
+
if m.get('headers') is not None:
|
7039
|
+
self.headers = m.get('headers')
|
7040
|
+
if m.get('statusCode') is not None:
|
7041
|
+
self.status_code = m.get('statusCode')
|
7042
|
+
return self
|
7043
|
+
|
7044
|
+
|
6375
7045
|
class CreateMetricStoreRequest(TeaModel):
|
6376
7046
|
def __init__(
|
6377
7047
|
self,
|
6378
7048
|
auto_split: bool = None,
|
7049
|
+
hot_ttl: int = None,
|
7050
|
+
infrequent_access_ttl: int = None,
|
6379
7051
|
max_split_shard: int = None,
|
6380
7052
|
metric_type: str = None,
|
6381
7053
|
mode: str = None,
|
@@ -6385,6 +7057,8 @@ class CreateMetricStoreRequest(TeaModel):
     ):
         # Specifies whether to enable automatic sharding.
         self.auto_split = auto_split
+        self.hot_ttl = hot_ttl
+        self.infrequent_access_ttl = infrequent_access_ttl
         # The maximum number of shards into which existing shards can be automatically split. This parameter is valid only when you set the autoSplit parameter to true.
         self.max_split_shard = max_split_shard
         # The type of the metric data. Example: prometheus.
@@ -6415,6 +7089,10 @@ class CreateMetricStoreRequest(TeaModel):
         result = dict()
         if self.auto_split is not None:
             result['autoSplit'] = self.auto_split
+        if self.hot_ttl is not None:
+            result['hot_ttl'] = self.hot_ttl
+        if self.infrequent_access_ttl is not None:
+            result['infrequentAccessTTL'] = self.infrequent_access_ttl
         if self.max_split_shard is not None:
             result['maxSplitShard'] = self.max_split_shard
         if self.metric_type is not None:
@@ -6433,6 +7111,10 @@ class CreateMetricStoreRequest(TeaModel):
         m = m or dict()
         if m.get('autoSplit') is not None:
             self.auto_split = m.get('autoSplit')
+        if m.get('hot_ttl') is not None:
+            self.hot_ttl = m.get('hot_ttl')
+        if m.get('infrequentAccessTTL') is not None:
+            self.infrequent_access_ttl = m.get('infrequentAccessTTL')
         if m.get('maxSplitShard') is not None:
             self.max_split_shard = m.get('maxSplitShard')
         if m.get('metricType') is not None:
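Note the asymmetry in the new serialization keys: hot_ttl is emitted as 'hot_ttl' while infrequent_access_ttl is emitted as 'infrequentAccessTTL'. A minimal check based only on the code above:

    from alibabacloud_sls20201230.models import CreateMetricStoreRequest

    request = CreateMetricStoreRequest(auto_split=True, hot_ttl=7, infrequent_access_ttl=30)
    m = request.to_map()
    assert 'hot_ttl' in m and 'infrequentAccessTTL' in m   # snake_case vs. camelCase keys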
@@ -6670,13 +7352,21 @@ class CreateOSSIngestionRequest(TeaModel):
         name: str = None,
         schedule: Schedule = None,
     ):
+        # The configurations of the OSS data import job.
+        #
         # This parameter is required.
         self.configuration = configuration
+        # The description of the job.
         self.description = description
+        # The display name.
+        #
         # This parameter is required.
         self.display_name = display_name
+        # The name of the OSS data import job.
+        #
         # This parameter is required.
         self.name = name
+        # The scheduling type. By default, you do not need to specify this parameter. If you want to import data at regular intervals, such as importing data every Monday at 08: 00., you can specify a cron expression.
         self.schedule = schedule
 
     def validate(self):
@@ -6818,11 +7508,11 @@ class CreateOssExternalStoreRequestParameter(TeaModel):
         #
         # This parameter is required.
         self.columns = columns
-        # The OSS endpoint. For more information, see [
+        # The Object Storage Service (OSS) endpoint. For more information, see [Endpoints](https://help.aliyun.com/document_detail/31837.html).
         #
         # This parameter is required.
         self.endpoint = endpoint
-        # The associated OSS objects. Valid values of n: 1 to 100.
+        # The names of the associated OSS objects. Valid values of n: 1 to 100.
         #
         # This parameter is required.
         self.objects = objects
@@ -6886,7 +7576,7 @@ class CreateOssExternalStoreRequest(TeaModel):
         #
         # This parameter is required.
         self.external_store_name = external_store_name
-        # The parameters
+        # The parameters that are configured for the external store.
         #
         # This parameter is required.
         self.parameter = parameter
@@ -6964,23 +7654,34 @@ class CreateProjectRequest(TeaModel):
         data_redundancy_type: str = None,
         description: str = None,
         project_name: str = None,
+        recycle_bin_enabled: bool = None,
         resource_group_id: str = None,
     ):
-        #
+        # The disaster recovery type. Valid values:
+        #
+        # * LRS: locally redundant storage
+        # * ZRS: zone-redundant storage
         self.data_redundancy_type = data_redundancy_type
         # The description of the project.
         #
         # This parameter is required.
         self.description = description
-        # The
+        # The project name must be unique in a region. You cannot change the name after you create the project. The name must meet the following requirements:
         #
-        # * The name must be unique.
-        # *
-        # *
-        # *
+        # * The name must be globally unique.
+        # * The name can contain only lowercase letters, digits, and hyphens (-).
+        # * The name must start and end with a lowercase letter or a digit.
+        # * The name must be 3 to 63 characters in length.
         #
         # This parameter is required.
         self.project_name = project_name
+        # Specifies whether to enable the recycle bin feature.
+        #
+        # Valid values:
+        #
+        # * true
+        # * false
+        self.recycle_bin_enabled = recycle_bin_enabled
         # The ID of the resource group.
         self.resource_group_id = resource_group_id
 
@@ -6999,6 +7700,8 @@ class CreateProjectRequest(TeaModel):
             result['description'] = self.description
         if self.project_name is not None:
             result['projectName'] = self.project_name
+        if self.recycle_bin_enabled is not None:
+            result['recycleBinEnabled'] = self.recycle_bin_enabled
         if self.resource_group_id is not None:
             result['resourceGroupId'] = self.resource_group_id
         return result
@@ -7011,6 +7714,8 @@ class CreateProjectRequest(TeaModel):
             self.description = m.get('description')
         if m.get('projectName') is not None:
             self.project_name = m.get('projectName')
+        if m.get('recycleBinEnabled') is not None:
+            self.recycle_bin_enabled = m.get('recycleBinEnabled')
         if m.get('resourceGroupId') is not None:
             self.resource_group_id = m.get('resourceGroupId')
         return self
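Combining the documented naming rules with the new recycleBinEnabled flag, a project-creation request body might be built like this; the project name is a placeholder.

    from alibabacloud_sls20201230.models import CreateProjectRequest

    request = CreateProjectRequest(
        project_name='my-log-project',   # 3-63 chars, lowercase letters, digits, hyphens; globally unique
        description='Demo project',
        data_redundancy_type='ZRS',      # or 'LRS'
        recycle_bin_enabled=True,        # new in 5.7.1
    )
    assert request.to_map()['recycleBinEnabled'] is True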
@@ -7062,13 +7767,13 @@ class CreateRdsExternalStoreRequestParameter(TeaModel):
         username: str = None,
         vpc_id: str = None,
     ):
-        # The name of the database
+        # The name of the database created on the ApsaraDB RDS for MySQL instance.
         #
         # This parameter is required.
         self.db = db
         # The internal or public endpoint of the ApsaraDB RDS for MySQL instance.
         self.host = host
-        #
+        # You do not need to specify this parameter.
         self.instance_id = instance_id
         # The password that is used to log on to the ApsaraDB RDS for MySQL instance.
         #
@@ -7082,7 +7787,7 @@ class CreateRdsExternalStoreRequestParameter(TeaModel):
         #
         # This parameter is required.
         self.region = region
-        # The name of the
+        # The name of the table in the database created on the ApsaraDB RDS for MySQL instance.
         #
         # This parameter is required.
         self.table = table
@@ -7160,7 +7865,7 @@ class CreateRdsExternalStoreRequest(TeaModel):
         #
         # This parameter is required.
         self.parameter = parameter
-        # The storage type. Set the value to rds-vpc, which indicates an ApsaraDB RDS for MySQL
+        # The storage type. Set the value to rds-vpc, which indicates a database created on an ApsaraDB RDS for MySQL instance in a virtual private cloud (VPC).
         #
         # This parameter is required.
         self.store_type = store_type
@@ -7591,8 +8296,12 @@ class CreateTicketRequest(TeaModel):
         access_token_expiration_time: int = None,
         expiration_time: int = None,
     ):
+        # * The validity period of the access token. Unit: seconds. Default value: 86400, which specifies one day. Valid values: 0 to 86400.
+        # * The validity period of the access token is the smaller value between accessTokenExpirationTime and expirationTime.
+        # * If you use a Security Token Service (STS) token to call this operation, the validity period of the access token is the smallest value among accessTokenExpirationTime, expirationTime, and the validity period of the STS token.
         self.access_token_expiration_time = access_token_expiration_time
-        #
+        # * You must use the Simple Log Service endpoint for the China (Shanghai) or Singapore region to call the CreateTicket operation. After you obtain the ticket, you can use the ticket regardless of the region.
+        # * The validity period for the URL of the console page that you want to embed. Unit: seconds. Default value: 86400, which specifies one day. Valid values: 0 to 2592000. The value 2592000 specifies 30 days.
         self.expiration_time = expiration_time
 
     def validate(self):
@@ -8210,6 +8919,39 @@ class DeleteIndexResponse(TeaModel):
|
|
8210
8919
|
return self
|
8211
8920
|
|
8212
8921
|
|
8922
|
+
class DeleteIngestProcessorResponse(TeaModel):
|
8923
|
+
def __init__(
|
8924
|
+
self,
|
8925
|
+
headers: Dict[str, str] = None,
|
8926
|
+
status_code: int = None,
|
8927
|
+
):
|
8928
|
+
self.headers = headers
|
8929
|
+
self.status_code = status_code
|
8930
|
+
|
8931
|
+
def validate(self):
|
8932
|
+
pass
|
8933
|
+
|
8934
|
+
def to_map(self):
|
8935
|
+
_map = super().to_map()
|
8936
|
+
if _map is not None:
|
8937
|
+
return _map
|
8938
|
+
|
8939
|
+
result = dict()
|
8940
|
+
if self.headers is not None:
|
8941
|
+
result['headers'] = self.headers
|
8942
|
+
if self.status_code is not None:
|
8943
|
+
result['statusCode'] = self.status_code
|
8944
|
+
return result
|
8945
|
+
|
8946
|
+
def from_map(self, m: dict = None):
|
8947
|
+
m = m or dict()
|
8948
|
+
if m.get('headers') is not None:
|
8949
|
+
self.headers = m.get('headers')
|
8950
|
+
if m.get('statusCode') is not None:
|
8951
|
+
self.status_code = m.get('statusCode')
|
8952
|
+
return self
|
8953
|
+
|
8954
|
+
|
8213
8955
|
class DeleteLogStoreResponse(TeaModel):
|
8214
8956
|
def __init__(
|
8215
8957
|
self,
|
@@ -8342,6 +9084,39 @@ class DeleteMachineGroupResponse(TeaModel):
|
|
8342
9084
|
return self
|
8343
9085
|
|
8344
9086
|
|
9087
|
+
class DeleteMaxComputeExportResponse(TeaModel):
|
9088
|
+
def __init__(
|
9089
|
+
self,
|
9090
|
+
headers: Dict[str, str] = None,
|
9091
|
+
status_code: int = None,
|
9092
|
+
):
|
9093
|
+
self.headers = headers
|
9094
|
+
self.status_code = status_code
|
9095
|
+
|
9096
|
+
def validate(self):
|
9097
|
+
pass
|
9098
|
+
|
9099
|
+
def to_map(self):
|
9100
|
+
_map = super().to_map()
|
9101
|
+
if _map is not None:
|
9102
|
+
return _map
|
9103
|
+
|
9104
|
+
result = dict()
|
9105
|
+
if self.headers is not None:
|
9106
|
+
result['headers'] = self.headers
|
9107
|
+
if self.status_code is not None:
|
9108
|
+
result['statusCode'] = self.status_code
|
9109
|
+
return result
|
9110
|
+
|
9111
|
+
def from_map(self, m: dict = None):
|
9112
|
+
m = m or dict()
|
9113
|
+
if m.get('headers') is not None:
|
9114
|
+
self.headers = m.get('headers')
|
9115
|
+
if m.get('statusCode') is not None:
|
9116
|
+
self.status_code = m.get('statusCode')
|
9117
|
+
return self
|
9118
|
+
|
9119
|
+
|
8345
9120
|
class DeleteMetricStoreResponse(TeaModel):
|
8346
9121
|
def __init__(
|
8347
9122
|
self,
|
@@ -8474,6 +9249,33 @@ class DeleteOSSIngestionResponse(TeaModel):
|
|
8474
9249
|
return self
|
8475
9250
|
|
8476
9251
|
|
9252
|
+
class DeleteProjectRequest(TeaModel):
|
9253
|
+
def __init__(
|
9254
|
+
self,
|
9255
|
+
force_delete: bool = None,
|
9256
|
+
):
|
9257
|
+
self.force_delete = force_delete
|
9258
|
+
|
9259
|
+
def validate(self):
|
9260
|
+
pass
|
9261
|
+
|
9262
|
+
def to_map(self):
|
9263
|
+
_map = super().to_map()
|
9264
|
+
if _map is not None:
|
9265
|
+
return _map
|
9266
|
+
|
9267
|
+
result = dict()
|
9268
|
+
if self.force_delete is not None:
|
9269
|
+
result['forceDelete'] = self.force_delete
|
9270
|
+
return result
|
9271
|
+
|
9272
|
+
def from_map(self, m: dict = None):
|
9273
|
+
m = m or dict()
|
9274
|
+
if m.get('forceDelete') is not None:
|
9275
|
+
self.force_delete = m.get('forceDelete')
|
9276
|
+
return self
|
9277
|
+
|
9278
|
+
|
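A short usage sketch for the new DeleteProjectRequest model added above, showing how the force_delete attribute maps to the forceDelete key; the boolean values are samples.

from alibabacloud_sls20201230.models import DeleteProjectRequest

req = DeleteProjectRequest(force_delete=True)
print(req.to_map())                                   # {'forceDelete': True}
parsed = DeleteProjectRequest().from_map({'forceDelete': False})
print(parsed.force_delete)                            # False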
8477
9279
|
class DeleteProjectResponse(TeaModel):
|
8478
9280
|
def __init__(
|
8479
9281
|
self,
|
@@ -8644,6 +9446,13 @@ class DescribeRegionsRequest(TeaModel):
|
|
8644
9446
|
self,
|
8645
9447
|
language: str = None,
|
8646
9448
|
):
|
9449
|
+
# The language of the localName parameter that is returned.
|
9450
|
+
#
|
9451
|
+
# Valid values:
|
9452
|
+
#
|
9453
|
+
# * ja
|
9454
|
+
# * en
|
9455
|
+
# * zh
|
8647
9456
|
self.language = language
|
8648
9457
|
|
8649
9458
|
def validate(self):
|
@@ -8674,9 +9483,13 @@ class DescribeRegionsResponseBodyRegions(TeaModel):
|
|
8674
9483
|
local_name: str = None,
|
8675
9484
|
region: str = None,
|
8676
9485
|
):
|
9486
|
+
# The public endpoint of Simple Log Service.
|
8677
9487
|
self.internet_endpoint = internet_endpoint
|
9488
|
+
# The internal endpoint of Simple Log Service.
|
8678
9489
|
self.intranet_endpoint = intranet_endpoint
|
9490
|
+
# The name of the Simple Log Service region.
|
8679
9491
|
self.local_name = local_name
|
9492
|
+
# The Simple Log Service region.
|
8680
9493
|
self.region = region
|
8681
9494
|
|
8682
9495
|
def validate(self):
|
@@ -10484,7 +11297,7 @@ class GetDownloadJobResponseBody(TeaModel):
|
|
10484
11297
|
self.execution_details = execution_details
|
10485
11298
|
# The resource attribute field that represents the resource name.
|
10486
11299
|
self.name = name
|
10487
|
-
#
|
11300
|
+
# The status of the log download task.
|
10488
11301
|
self.status = status
|
10489
11302
|
|
10490
11303
|
def validate(self):
|
@@ -11025,12 +11838,12 @@ class GetIndexResponse(TeaModel):
|
|
11025
11838
|
return self
|
11026
11839
|
|
11027
11840
|
|
11028
|
-
class
|
11841
|
+
class GetIngestProcessorResponse(TeaModel):
|
11029
11842
|
def __init__(
|
11030
11843
|
self,
|
11031
11844
|
headers: Dict[str, str] = None,
|
11032
11845
|
status_code: int = None,
|
11033
|
-
body:
|
11846
|
+
body: IngestProcessor = None,
|
11034
11847
|
):
|
11035
11848
|
self.headers = headers
|
11036
11849
|
self.status_code = status_code
|
@@ -11061,21 +11874,25 @@ class GetLogStoreResponse(TeaModel):
|
|
11061
11874
|
if m.get('statusCode') is not None:
|
11062
11875
|
self.status_code = m.get('statusCode')
|
11063
11876
|
if m.get('body') is not None:
|
11064
|
-
temp_model =
|
11877
|
+
temp_model = IngestProcessor()
|
11065
11878
|
self.body = temp_model.from_map(m['body'])
|
11066
11879
|
return self
|
11067
11880
|
|
11068
11881
|
|
11069
|
-
class
|
11882
|
+
class GetLogStoreResponse(TeaModel):
|
11070
11883
|
def __init__(
|
11071
11884
|
self,
|
11072
|
-
|
11885
|
+
headers: Dict[str, str] = None,
|
11886
|
+
status_code: int = None,
|
11887
|
+
body: Logstore = None,
|
11073
11888
|
):
|
11074
|
-
|
11075
|
-
self.
|
11889
|
+
self.headers = headers
|
11890
|
+
self.status_code = status_code
|
11891
|
+
self.body = body
|
11076
11892
|
|
11077
11893
|
def validate(self):
|
11078
|
-
|
11894
|
+
if self.body:
|
11895
|
+
self.body.validate()
|
11079
11896
|
|
11080
11897
|
def to_map(self):
|
11081
11898
|
_map = super().to_map()
|
@@ -11083,13 +11900,50 @@ class GetLogStoreMeteringModeResponseBody(TeaModel):
|
|
11083
11900
|
return _map
|
11084
11901
|
|
11085
11902
|
result = dict()
|
11086
|
-
if self.
|
11087
|
-
result['
|
11088
|
-
|
11089
|
-
|
11090
|
-
|
11091
|
-
|
11092
|
-
|
11903
|
+
if self.headers is not None:
|
11904
|
+
result['headers'] = self.headers
|
11905
|
+
if self.status_code is not None:
|
11906
|
+
result['statusCode'] = self.status_code
|
11907
|
+
if self.body is not None:
|
11908
|
+
result['body'] = self.body.to_map()
|
11909
|
+
return result
|
11910
|
+
|
11911
|
+
def from_map(self, m: dict = None):
|
11912
|
+
m = m or dict()
|
11913
|
+
if m.get('headers') is not None:
|
11914
|
+
self.headers = m.get('headers')
|
11915
|
+
if m.get('statusCode') is not None:
|
11916
|
+
self.status_code = m.get('statusCode')
|
11917
|
+
if m.get('body') is not None:
|
11918
|
+
temp_model = Logstore()
|
11919
|
+
self.body = temp_model.from_map(m['body'])
|
11920
|
+
return self
|
11921
|
+
|
11922
|
+
|
11923
|
+
class GetLogStoreMeteringModeResponseBody(TeaModel):
|
11924
|
+
def __init__(
|
11925
|
+
self,
|
11926
|
+
metering_mode: str = None,
|
11927
|
+
):
|
11928
|
+
# The billing mode. Default value: ChargeByFunction. Valid values: ChargeByFunction and ChargeByDataIngest.
|
11929
|
+
self.metering_mode = metering_mode
|
11930
|
+
|
11931
|
+
def validate(self):
|
11932
|
+
pass
|
11933
|
+
|
11934
|
+
def to_map(self):
|
11935
|
+
_map = super().to_map()
|
11936
|
+
if _map is not None:
|
11937
|
+
return _map
|
11938
|
+
|
11939
|
+
result = dict()
|
11940
|
+
if self.metering_mode is not None:
|
11941
|
+
result['meteringMode'] = self.metering_mode
|
11942
|
+
return result
|
11943
|
+
|
11944
|
+
def from_map(self, m: dict = None):
|
11945
|
+
m = m or dict()
|
11946
|
+
if m.get('meteringMode') is not None:
|
11093
11947
|
self.metering_mode = m.get('meteringMode')
|
11094
11948
|
return self
|
11095
11949
|
|
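A minimal sketch for GetLogStoreMeteringModeResponseBody as restored above; the meteringMode value is one of the two valid values listed in its comment.

from alibabacloud_sls20201230.models import GetLogStoreMeteringModeResponseBody

body = GetLogStoreMeteringModeResponseBody().from_map({'meteringMode': 'ChargeByFunction'})
print(body.metering_mode)   # ChargeByFunction
print(body.to_map())        # {'meteringMode': 'ChargeByFunction'}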
@@ -11829,6 +12683,7 @@ class GetMLServiceResultsRequest(TeaModel):
|
|
11829
12683
|
):
|
11830
12684
|
self.allow_builtin = allow_builtin
|
11831
12685
|
self.body = body
|
12686
|
+
# The version of the algorithm. The algorithm varies based on the version.
|
11832
12687
|
self.version = version
|
11833
12688
|
|
11834
12689
|
def validate(self):
|
@@ -11976,11 +12831,54 @@ class GetMachineGroupResponse(TeaModel):
|
|
11976
12831
|
return self
|
11977
12832
|
|
11978
12833
|
|
12834
|
+
class GetMaxComputeExportResponse(TeaModel):
|
12835
|
+
def __init__(
|
12836
|
+
self,
|
12837
|
+
headers: Dict[str, str] = None,
|
12838
|
+
status_code: int = None,
|
12839
|
+
body: MaxComputeExport = None,
|
12840
|
+
):
|
12841
|
+
self.headers = headers
|
12842
|
+
self.status_code = status_code
|
12843
|
+
self.body = body
|
12844
|
+
|
12845
|
+
def validate(self):
|
12846
|
+
if self.body:
|
12847
|
+
self.body.validate()
|
12848
|
+
|
12849
|
+
def to_map(self):
|
12850
|
+
_map = super().to_map()
|
12851
|
+
if _map is not None:
|
12852
|
+
return _map
|
12853
|
+
|
12854
|
+
result = dict()
|
12855
|
+
if self.headers is not None:
|
12856
|
+
result['headers'] = self.headers
|
12857
|
+
if self.status_code is not None:
|
12858
|
+
result['statusCode'] = self.status_code
|
12859
|
+
if self.body is not None:
|
12860
|
+
result['body'] = self.body.to_map()
|
12861
|
+
return result
|
12862
|
+
|
12863
|
+
def from_map(self, m: dict = None):
|
12864
|
+
m = m or dict()
|
12865
|
+
if m.get('headers') is not None:
|
12866
|
+
self.headers = m.get('headers')
|
12867
|
+
if m.get('statusCode') is not None:
|
12868
|
+
self.status_code = m.get('statusCode')
|
12869
|
+
if m.get('body') is not None:
|
12870
|
+
temp_model = MaxComputeExport()
|
12871
|
+
self.body = temp_model.from_map(m['body'])
|
12872
|
+
return self
|
12873
|
+
|
12874
|
+
|
11979
12875
|
class GetMetricStoreResponseBody(TeaModel):
|
11980
12876
|
def __init__(
|
11981
12877
|
self,
|
11982
12878
|
auto_split: bool = None,
|
11983
12879
|
create_time: int = None,
|
12880
|
+
hot_ttl: int = None,
|
12881
|
+
infrequent_access_ttl: int = None,
|
11984
12882
|
last_modify_time: int = None,
|
11985
12883
|
max_split_shard: int = None,
|
11986
12884
|
metric_type: str = None,
|
@@ -11993,6 +12891,8 @@ class GetMetricStoreResponseBody(TeaModel):
|
|
11993
12891
|
self.auto_split = auto_split
|
11994
12892
|
# The creation time. The value is a UNIX timestamp.
|
11995
12893
|
self.create_time = create_time
|
12894
|
+
self.hot_ttl = hot_ttl
|
12895
|
+
self.infrequent_access_ttl = infrequent_access_ttl
|
11996
12896
|
# The last update time. The value is a UNIX timestamp.
|
11997
12897
|
self.last_modify_time = last_modify_time
|
11998
12898
|
# The maximum number of shards into which existing shards can be automatically split.
|
@@ -12021,6 +12921,10 @@ class GetMetricStoreResponseBody(TeaModel):
|
|
12021
12921
|
result['autoSplit'] = self.auto_split
|
12022
12922
|
if self.create_time is not None:
|
12023
12923
|
result['createTime'] = self.create_time
|
12924
|
+
if self.hot_ttl is not None:
|
12925
|
+
result['hot_ttl'] = self.hot_ttl
|
12926
|
+
if self.infrequent_access_ttl is not None:
|
12927
|
+
result['infrequentAccessTTL'] = self.infrequent_access_ttl
|
12024
12928
|
if self.last_modify_time is not None:
|
12025
12929
|
result['lastModifyTime'] = self.last_modify_time
|
12026
12930
|
if self.max_split_shard is not None:
|
@@ -12043,6 +12947,10 @@ class GetMetricStoreResponseBody(TeaModel):
|
|
12043
12947
|
self.auto_split = m.get('autoSplit')
|
12044
12948
|
if m.get('createTime') is not None:
|
12045
12949
|
self.create_time = m.get('createTime')
|
12950
|
+
if m.get('hot_ttl') is not None:
|
12951
|
+
self.hot_ttl = m.get('hot_ttl')
|
12952
|
+
if m.get('infrequentAccessTTL') is not None:
|
12953
|
+
self.infrequent_access_ttl = m.get('infrequentAccessTTL')
|
12046
12954
|
if m.get('lastModifyTime') is not None:
|
12047
12955
|
self.last_modify_time = m.get('lastModifyTime')
|
12048
12956
|
if m.get('maxSplitShard') is not None:
|
@@ -13011,6 +13919,157 @@ class ListAgentInstanceConfigsResponse(TeaModel):
|
|
13011
13919
|
return self
|
13012
13920
|
|
13013
13921
|
|
13922
|
+
class ListAiToolsResponseBodyFields(TeaModel):
|
13923
|
+
def __init__(
|
13924
|
+
self,
|
13925
|
+
name: str = None,
|
13926
|
+
option: List[str] = None,
|
13927
|
+
required: bool = None,
|
13928
|
+
type: str = None,
|
13929
|
+
example: str = None,
|
13930
|
+
description: str = None,
|
13931
|
+
):
|
13932
|
+
self.name = name
|
13933
|
+
self.option = option
|
13934
|
+
self.required = required
|
13935
|
+
self.type = type
|
13936
|
+
self.example = example
|
13937
|
+
self.description = description
|
13938
|
+
|
13939
|
+
def validate(self):
|
13940
|
+
pass
|
13941
|
+
|
13942
|
+
def to_map(self):
|
13943
|
+
_map = super().to_map()
|
13944
|
+
if _map is not None:
|
13945
|
+
return _map
|
13946
|
+
|
13947
|
+
result = dict()
|
13948
|
+
if self.name is not None:
|
13949
|
+
result['name'] = self.name
|
13950
|
+
if self.option is not None:
|
13951
|
+
result['option'] = self.option
|
13952
|
+
if self.required is not None:
|
13953
|
+
result['required'] = self.required
|
13954
|
+
if self.type is not None:
|
13955
|
+
result['type'] = self.type
|
13956
|
+
if self.example is not None:
|
13957
|
+
result['example'] = self.example
|
13958
|
+
if self.description is not None:
|
13959
|
+
result['description'] = self.description
|
13960
|
+
return result
|
13961
|
+
|
13962
|
+
def from_map(self, m: dict = None):
|
13963
|
+
m = m or dict()
|
13964
|
+
if m.get('name') is not None:
|
13965
|
+
self.name = m.get('name')
|
13966
|
+
if m.get('option') is not None:
|
13967
|
+
self.option = m.get('option')
|
13968
|
+
if m.get('required') is not None:
|
13969
|
+
self.required = m.get('required')
|
13970
|
+
if m.get('type') is not None:
|
13971
|
+
self.type = m.get('type')
|
13972
|
+
if m.get('example') is not None:
|
13973
|
+
self.example = m.get('example')
|
13974
|
+
if m.get('description') is not None:
|
13975
|
+
self.description = m.get('description')
|
13976
|
+
return self
|
13977
|
+
|
13978
|
+
|
13979
|
+
class ListAiToolsResponseBody(TeaModel):
|
13980
|
+
def __init__(
|
13981
|
+
self,
|
13982
|
+
fields: List[ListAiToolsResponseBodyFields] = None,
|
13983
|
+
name: str = None,
|
13984
|
+
description: str = None,
|
13985
|
+
):
|
13986
|
+
self.fields = fields
|
13987
|
+
self.name = name
|
13988
|
+
self.description = description
|
13989
|
+
|
13990
|
+
def validate(self):
|
13991
|
+
if self.fields:
|
13992
|
+
for k in self.fields:
|
13993
|
+
if k:
|
13994
|
+
k.validate()
|
13995
|
+
|
13996
|
+
def to_map(self):
|
13997
|
+
_map = super().to_map()
|
13998
|
+
if _map is not None:
|
13999
|
+
return _map
|
14000
|
+
|
14001
|
+
result = dict()
|
14002
|
+
result['fields'] = []
|
14003
|
+
if self.fields is not None:
|
14004
|
+
for k in self.fields:
|
14005
|
+
result['fields'].append(k.to_map() if k else None)
|
14006
|
+
if self.name is not None:
|
14007
|
+
result['name'] = self.name
|
14008
|
+
if self.description is not None:
|
14009
|
+
result['description'] = self.description
|
14010
|
+
return result
|
14011
|
+
|
14012
|
+
def from_map(self, m: dict = None):
|
14013
|
+
m = m or dict()
|
14014
|
+
self.fields = []
|
14015
|
+
if m.get('fields') is not None:
|
14016
|
+
for k in m.get('fields'):
|
14017
|
+
temp_model = ListAiToolsResponseBodyFields()
|
14018
|
+
self.fields.append(temp_model.from_map(k))
|
14019
|
+
if m.get('name') is not None:
|
14020
|
+
self.name = m.get('name')
|
14021
|
+
if m.get('description') is not None:
|
14022
|
+
self.description = m.get('description')
|
14023
|
+
return self
|
14024
|
+
|
14025
|
+
|
14026
|
+
class ListAiToolsResponse(TeaModel):
|
14027
|
+
def __init__(
|
14028
|
+
self,
|
14029
|
+
headers: Dict[str, str] = None,
|
14030
|
+
status_code: int = None,
|
14031
|
+
body: List[ListAiToolsResponseBody] = None,
|
14032
|
+
):
|
14033
|
+
self.headers = headers
|
14034
|
+
self.status_code = status_code
|
14035
|
+
self.body = body
|
14036
|
+
|
14037
|
+
def validate(self):
|
14038
|
+
if self.body:
|
14039
|
+
for k in self.body:
|
14040
|
+
if k:
|
14041
|
+
k.validate()
|
14042
|
+
|
14043
|
+
def to_map(self):
|
14044
|
+
_map = super().to_map()
|
14045
|
+
if _map is not None:
|
14046
|
+
return _map
|
14047
|
+
|
14048
|
+
result = dict()
|
14049
|
+
if self.headers is not None:
|
14050
|
+
result['headers'] = self.headers
|
14051
|
+
if self.status_code is not None:
|
14052
|
+
result['statusCode'] = self.status_code
|
14053
|
+
result['body'] = []
|
14054
|
+
if self.body is not None:
|
14055
|
+
for k in self.body:
|
14056
|
+
result['body'].append(k.to_map() if k else None)
|
14057
|
+
return result
|
14058
|
+
|
14059
|
+
def from_map(self, m: dict = None):
|
14060
|
+
m = m or dict()
|
14061
|
+
if m.get('headers') is not None:
|
14062
|
+
self.headers = m.get('headers')
|
14063
|
+
if m.get('statusCode') is not None:
|
14064
|
+
self.status_code = m.get('statusCode')
|
14065
|
+
self.body = []
|
14066
|
+
if m.get('body') is not None:
|
14067
|
+
for k in m.get('body'):
|
14068
|
+
temp_model = ListAiToolsResponseBody()
|
14069
|
+
self.body.append(temp_model.from_map(k))
|
14070
|
+
return self
|
14071
|
+
|
14072
|
+
|
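A sketch of how the new ListAiToolsResponse deserializes its list-typed body into ListAiToolsResponseBody items with nested ListAiToolsResponseBodyFields; the tool name, description, and field entries are invented sample data.

from alibabacloud_sls20201230.models import ListAiToolsResponse

resp = ListAiToolsResponse().from_map({
    'statusCode': 200,
    'body': [{
        'name': 'demo_tool',                     # sample value
        'description': 'sample description',     # sample value
        'fields': [{'name': 'query', 'type': 'string', 'required': True}],
    }],
})
print(resp.body[0].name)                 # demo_tool
print(resp.body[0].fields[0].required)   # True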
13014
14073
|
class ListAlertsRequest(TeaModel):
|
13015
14074
|
def __init__(
|
13016
14075
|
self,
|
@@ -13020,6 +14079,7 @@ class ListAlertsRequest(TeaModel):
|
|
13020
14079
|
):
|
13021
14080
|
self.logstore = logstore
|
13022
14081
|
self.offset = offset
|
14082
|
+
# The number of entries per page. Maximum value: 200. Default value: 10.
|
13023
14083
|
self.size = size
|
13024
14084
|
|
13025
14085
|
def validate(self):
|
@@ -13514,6 +14574,7 @@ class ListCollectionPoliciesRequest(TeaModel):
|
|
13514
14574
|
self.instance_id = instance_id
|
13515
14575
|
self.offset = offset
|
13516
14576
|
self.policy_name = policy_name
|
14577
|
+
# The code of the service.
|
13517
14578
|
self.product_code = product_code
|
13518
14579
|
self.size = size
|
13519
14580
|
|
@@ -14727,6 +15788,7 @@ class ListDownloadJobsResponseBodyResults(TeaModel):
|
|
14727
15788
|
self.execution_details = execution_details
|
14728
15789
|
# 代表资源名称的资源属性字段
|
14729
15790
|
self.name = name
|
15791
|
+
# The task status.
|
14730
15792
|
self.status = status
|
14731
15793
|
|
14732
15794
|
def validate(self):
|
@@ -14786,6 +15848,7 @@ class ListDownloadJobsResponseBody(TeaModel):
|
|
14786
15848
|
total: int = None,
|
14787
15849
|
):
|
14788
15850
|
self.count = count
|
15851
|
+
# The log download tasks.
|
14789
15852
|
self.results = results
|
14790
15853
|
self.total = total
|
14791
15854
|
|
@@ -14993,31 +16056,22 @@ class ListETLsResponse(TeaModel):
|
|
14993
16056
|
return self
|
14994
16057
|
|
14995
16058
|
|
14996
|
-
class
|
16059
|
+
class ListIngestProcessorsRequest(TeaModel):
|
14997
16060
|
def __init__(
|
14998
16061
|
self,
|
14999
|
-
|
15000
|
-
mode: str = None,
|
16062
|
+
display_name: str = None,
|
15001
16063
|
offset: int = None,
|
16064
|
+
processor_name: str = None,
|
15002
16065
|
size: int = None,
|
15003
|
-
telemetry_type: str = None,
|
15004
16066
|
):
|
15005
|
-
# The name of the
|
15006
|
-
self.
|
15007
|
-
# The
|
15008
|
-
#
|
15009
|
-
# * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can also use this type of Logstore to build a comprehensive observability system.
|
15010
|
-
# * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the volume of data is large, the log retention period is long, or log analysis is not required. Log retention periods of weeks or months are considered long.
|
15011
|
-
self.mode = mode
|
15012
|
-
# The line from which the query starts. Default value: 0.
|
16067
|
+
# The display name of the ingest processor.
|
16068
|
+
self.display_name = display_name
|
16069
|
+
# The offset. Default value: 0.
|
15013
16070
|
self.offset = offset
|
15014
|
-
# The
|
16071
|
+
# The identifier of the ingest processor.
|
16072
|
+
self.processor_name = processor_name
|
16073
|
+
# The number of entries. Default value: 200.
|
15015
16074
|
self.size = size
|
15016
|
-
# The type of the data that you want to query. Valid values:
|
15017
|
-
#
|
15018
|
-
# * None: logs
|
15019
|
-
# * Metrics: metrics
|
15020
|
-
self.telemetry_type = telemetry_type
|
15021
16075
|
|
15022
16076
|
def validate(self):
|
15023
16077
|
pass
|
@@ -15028,38 +16082,187 @@ class ListLogStoresRequest(TeaModel):
|
|
15028
16082
|
return _map
|
15029
16083
|
|
15030
16084
|
result = dict()
|
15031
|
-
if self.
|
15032
|
-
result['
|
15033
|
-
if self.mode is not None:
|
15034
|
-
result['mode'] = self.mode
|
16085
|
+
if self.display_name is not None:
|
16086
|
+
result['displayName'] = self.display_name
|
15035
16087
|
if self.offset is not None:
|
15036
16088
|
result['offset'] = self.offset
|
16089
|
+
if self.processor_name is not None:
|
16090
|
+
result['processorName'] = self.processor_name
|
15037
16091
|
if self.size is not None:
|
15038
16092
|
result['size'] = self.size
|
15039
|
-
if self.telemetry_type is not None:
|
15040
|
-
result['telemetryType'] = self.telemetry_type
|
15041
16093
|
return result
|
15042
16094
|
|
15043
16095
|
def from_map(self, m: dict = None):
|
15044
16096
|
m = m or dict()
|
15045
|
-
if m.get('
|
15046
|
-
self.
|
15047
|
-
if m.get('mode') is not None:
|
15048
|
-
self.mode = m.get('mode')
|
16097
|
+
if m.get('displayName') is not None:
|
16098
|
+
self.display_name = m.get('displayName')
|
15049
16099
|
if m.get('offset') is not None:
|
15050
16100
|
self.offset = m.get('offset')
|
16101
|
+
if m.get('processorName') is not None:
|
16102
|
+
self.processor_name = m.get('processorName')
|
15051
16103
|
if m.get('size') is not None:
|
15052
16104
|
self.size = m.get('size')
|
15053
|
-
if m.get('telemetryType') is not None:
|
15054
|
-
self.telemetry_type = m.get('telemetryType')
|
15055
16105
|
return self
|
15056
16106
|
|
15057
16107
|
|
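A usage sketch for the new ListIngestProcessorsRequest added above, showing the snake_case attributes and the camelCase keys they serialize to; the display name is a sample, and 0/200 are the documented defaults.

from alibabacloud_sls20201230.models import ListIngestProcessorsRequest

req = ListIngestProcessorsRequest(display_name='demo', offset=0, size=200)
print(req.to_map())   # {'displayName': 'demo', 'offset': 0, 'size': 200}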
15058
|
-
class
|
16108
|
+
class ListIngestProcessorsResponseBody(TeaModel):
|
15059
16109
|
def __init__(
|
15060
16110
|
self,
|
15061
16111
|
count: int = None,
|
15062
|
-
|
16112
|
+
processors: List[IngestProcessor] = None,
|
16113
|
+
total: int = None,
|
16114
|
+
):
|
16115
|
+
# The number of entries returned.
|
16116
|
+
self.count = count
|
16117
|
+
# The ingest processors that are returned.
|
16118
|
+
self.processors = processors
|
16119
|
+
# The total number of entries returned.
|
16120
|
+
self.total = total
|
16121
|
+
|
16122
|
+
def validate(self):
|
16123
|
+
if self.processors:
|
16124
|
+
for k in self.processors:
|
16125
|
+
if k:
|
16126
|
+
k.validate()
|
16127
|
+
|
16128
|
+
def to_map(self):
|
16129
|
+
_map = super().to_map()
|
16130
|
+
if _map is not None:
|
16131
|
+
return _map
|
16132
|
+
|
16133
|
+
result = dict()
|
16134
|
+
if self.count is not None:
|
16135
|
+
result['count'] = self.count
|
16136
|
+
result['processors'] = []
|
16137
|
+
if self.processors is not None:
|
16138
|
+
for k in self.processors:
|
16139
|
+
result['processors'].append(k.to_map() if k else None)
|
16140
|
+
if self.total is not None:
|
16141
|
+
result['total'] = self.total
|
16142
|
+
return result
|
16143
|
+
|
16144
|
+
def from_map(self, m: dict = None):
|
16145
|
+
m = m or dict()
|
16146
|
+
if m.get('count') is not None:
|
16147
|
+
self.count = m.get('count')
|
16148
|
+
self.processors = []
|
16149
|
+
if m.get('processors') is not None:
|
16150
|
+
for k in m.get('processors'):
|
16151
|
+
temp_model = IngestProcessor()
|
16152
|
+
self.processors.append(temp_model.from_map(k))
|
16153
|
+
if m.get('total') is not None:
|
16154
|
+
self.total = m.get('total')
|
16155
|
+
return self
|
16156
|
+
|
16157
|
+
|
16158
|
+
class ListIngestProcessorsResponse(TeaModel):
|
16159
|
+
def __init__(
|
16160
|
+
self,
|
16161
|
+
headers: Dict[str, str] = None,
|
16162
|
+
status_code: int = None,
|
16163
|
+
body: ListIngestProcessorsResponseBody = None,
|
16164
|
+
):
|
16165
|
+
self.headers = headers
|
16166
|
+
self.status_code = status_code
|
16167
|
+
self.body = body
|
16168
|
+
|
16169
|
+
def validate(self):
|
16170
|
+
if self.body:
|
16171
|
+
self.body.validate()
|
16172
|
+
|
16173
|
+
def to_map(self):
|
16174
|
+
_map = super().to_map()
|
16175
|
+
if _map is not None:
|
16176
|
+
return _map
|
16177
|
+
|
16178
|
+
result = dict()
|
16179
|
+
if self.headers is not None:
|
16180
|
+
result['headers'] = self.headers
|
16181
|
+
if self.status_code is not None:
|
16182
|
+
result['statusCode'] = self.status_code
|
16183
|
+
if self.body is not None:
|
16184
|
+
result['body'] = self.body.to_map()
|
16185
|
+
return result
|
16186
|
+
|
16187
|
+
def from_map(self, m: dict = None):
|
16188
|
+
m = m or dict()
|
16189
|
+
if m.get('headers') is not None:
|
16190
|
+
self.headers = m.get('headers')
|
16191
|
+
if m.get('statusCode') is not None:
|
16192
|
+
self.status_code = m.get('statusCode')
|
16193
|
+
if m.get('body') is not None:
|
16194
|
+
temp_model = ListIngestProcessorsResponseBody()
|
16195
|
+
self.body = temp_model.from_map(m['body'])
|
16196
|
+
return self
|
16197
|
+
|
16198
|
+
|
16199
|
+
class ListLogStoresRequest(TeaModel):
|
16200
|
+
def __init__(
|
16201
|
+
self,
|
16202
|
+
logstore_name: str = None,
|
16203
|
+
mode: str = None,
|
16204
|
+
offset: int = None,
|
16205
|
+
size: int = None,
|
16206
|
+
telemetry_type: str = None,
|
16207
|
+
):
|
16208
|
+
# The name of the Logstore. Fuzzy match is supported. For example, if you enter test, Logstores whose name contains test are returned.
|
16209
|
+
self.logstore_name = logstore_name
|
16210
|
+
# The type of the Logstore. Valid values: standard and query.
|
16211
|
+
#
|
16212
|
+
# * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can also use this type of Logstore to build a comprehensive observability system.
|
16213
|
+
# * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the volume of data is large, the log retention period is long, or log analysis is not required. Log retention periods of weeks or months are considered long.
|
16214
|
+
self.mode = mode
|
16215
|
+
# The line from which the query starts. Default value: 0.
|
16216
|
+
self.offset = offset
|
16217
|
+
# The number of entries per page. Maximum value: 500. Default value: 200.
|
16218
|
+
self.size = size
|
16219
|
+
# The type of the data that you want to query. Valid values:
|
16220
|
+
#
|
16221
|
+
# * None: logs
|
16222
|
+
# * Metrics: metrics
|
16223
|
+
self.telemetry_type = telemetry_type
|
16224
|
+
|
16225
|
+
def validate(self):
|
16226
|
+
pass
|
16227
|
+
|
16228
|
+
def to_map(self):
|
16229
|
+
_map = super().to_map()
|
16230
|
+
if _map is not None:
|
16231
|
+
return _map
|
16232
|
+
|
16233
|
+
result = dict()
|
16234
|
+
if self.logstore_name is not None:
|
16235
|
+
result['logstoreName'] = self.logstore_name
|
16236
|
+
if self.mode is not None:
|
16237
|
+
result['mode'] = self.mode
|
16238
|
+
if self.offset is not None:
|
16239
|
+
result['offset'] = self.offset
|
16240
|
+
if self.size is not None:
|
16241
|
+
result['size'] = self.size
|
16242
|
+
if self.telemetry_type is not None:
|
16243
|
+
result['telemetryType'] = self.telemetry_type
|
16244
|
+
return result
|
16245
|
+
|
16246
|
+
def from_map(self, m: dict = None):
|
16247
|
+
m = m or dict()
|
16248
|
+
if m.get('logstoreName') is not None:
|
16249
|
+
self.logstore_name = m.get('logstoreName')
|
16250
|
+
if m.get('mode') is not None:
|
16251
|
+
self.mode = m.get('mode')
|
16252
|
+
if m.get('offset') is not None:
|
16253
|
+
self.offset = m.get('offset')
|
16254
|
+
if m.get('size') is not None:
|
16255
|
+
self.size = m.get('size')
|
16256
|
+
if m.get('telemetryType') is not None:
|
16257
|
+
self.telemetry_type = m.get('telemetryType')
|
16258
|
+
return self
|
16259
|
+
|
16260
|
+
|
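A usage sketch for ListLogStoresRequest as re-emitted above; the Logstore name is a sample, 'query' is one of the documented mode values, and 0/200 are the documented defaults.

from alibabacloud_sls20201230.models import ListLogStoresRequest

req = ListLogStoresRequest(logstore_name='test', mode='query', offset=0, size=200)
print(req.to_map())
# {'logstoreName': 'test', 'mode': 'query', 'offset': 0, 'size': 200}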
16261
|
+
class ListLogStoresResponseBody(TeaModel):
|
16262
|
+
def __init__(
|
16263
|
+
self,
|
16264
|
+
count: int = None,
|
16265
|
+
logstores: List[str] = None,
|
15063
16266
|
total: int = None,
|
15064
16267
|
):
|
15065
16268
|
# The number of entries returned on the current page.
|
@@ -15521,6 +16724,134 @@ class ListMachinesResponse(TeaModel):
|
|
15521
16724
|
return self
|
15522
16725
|
|
15523
16726
|
|
16727
|
+
class ListMaxComputeExportsRequest(TeaModel):
|
16728
|
+
def __init__(
|
16729
|
+
self,
|
16730
|
+
logstore: str = None,
|
16731
|
+
offset: int = None,
|
16732
|
+
size: int = None,
|
16733
|
+
):
|
16734
|
+
self.logstore = logstore
|
16735
|
+
self.offset = offset
|
16736
|
+
# The number of entries to return. Default value: 10.
|
16737
|
+
self.size = size
|
16738
|
+
|
16739
|
+
def validate(self):
|
16740
|
+
pass
|
16741
|
+
|
16742
|
+
def to_map(self):
|
16743
|
+
_map = super().to_map()
|
16744
|
+
if _map is not None:
|
16745
|
+
return _map
|
16746
|
+
|
16747
|
+
result = dict()
|
16748
|
+
if self.logstore is not None:
|
16749
|
+
result['logstore'] = self.logstore
|
16750
|
+
if self.offset is not None:
|
16751
|
+
result['offset'] = self.offset
|
16752
|
+
if self.size is not None:
|
16753
|
+
result['size'] = self.size
|
16754
|
+
return result
|
16755
|
+
|
16756
|
+
def from_map(self, m: dict = None):
|
16757
|
+
m = m or dict()
|
16758
|
+
if m.get('logstore') is not None:
|
16759
|
+
self.logstore = m.get('logstore')
|
16760
|
+
if m.get('offset') is not None:
|
16761
|
+
self.offset = m.get('offset')
|
16762
|
+
if m.get('size') is not None:
|
16763
|
+
self.size = m.get('size')
|
16764
|
+
return self
|
16765
|
+
|
16766
|
+
|
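A usage sketch for the new ListMaxComputeExportsRequest; the Logstore name is a sample and size follows the documented default of 10.

from alibabacloud_sls20201230.models import ListMaxComputeExportsRequest

req = ListMaxComputeExportsRequest(logstore='demo-logstore', offset=0, size=10)
print(req.to_map())   # {'logstore': 'demo-logstore', 'offset': 0, 'size': 10}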
16767
|
+
class ListMaxComputeExportsResponseBody(TeaModel):
|
16768
|
+
def __init__(
|
16769
|
+
self,
|
16770
|
+
count: int = None,
|
16771
|
+
results: List[MaxComputeExport] = None,
|
16772
|
+
total: int = None,
|
16773
|
+
):
|
16774
|
+
self.count = count
|
16775
|
+
self.results = results
|
16776
|
+
self.total = total
|
16777
|
+
|
16778
|
+
def validate(self):
|
16779
|
+
if self.results:
|
16780
|
+
for k in self.results:
|
16781
|
+
if k:
|
16782
|
+
k.validate()
|
16783
|
+
|
16784
|
+
def to_map(self):
|
16785
|
+
_map = super().to_map()
|
16786
|
+
if _map is not None:
|
16787
|
+
return _map
|
16788
|
+
|
16789
|
+
result = dict()
|
16790
|
+
if self.count is not None:
|
16791
|
+
result['count'] = self.count
|
16792
|
+
result['results'] = []
|
16793
|
+
if self.results is not None:
|
16794
|
+
for k in self.results:
|
16795
|
+
result['results'].append(k.to_map() if k else None)
|
16796
|
+
if self.total is not None:
|
16797
|
+
result['total'] = self.total
|
16798
|
+
return result
|
16799
|
+
|
16800
|
+
def from_map(self, m: dict = None):
|
16801
|
+
m = m or dict()
|
16802
|
+
if m.get('count') is not None:
|
16803
|
+
self.count = m.get('count')
|
16804
|
+
self.results = []
|
16805
|
+
if m.get('results') is not None:
|
16806
|
+
for k in m.get('results'):
|
16807
|
+
temp_model = MaxComputeExport()
|
16808
|
+
self.results.append(temp_model.from_map(k))
|
16809
|
+
if m.get('total') is not None:
|
16810
|
+
self.total = m.get('total')
|
16811
|
+
return self
|
16812
|
+
|
16813
|
+
|
16814
|
+
class ListMaxComputeExportsResponse(TeaModel):
|
16815
|
+
def __init__(
|
16816
|
+
self,
|
16817
|
+
headers: Dict[str, str] = None,
|
16818
|
+
status_code: int = None,
|
16819
|
+
body: ListMaxComputeExportsResponseBody = None,
|
16820
|
+
):
|
16821
|
+
self.headers = headers
|
16822
|
+
self.status_code = status_code
|
16823
|
+
self.body = body
|
16824
|
+
|
16825
|
+
def validate(self):
|
16826
|
+
if self.body:
|
16827
|
+
self.body.validate()
|
16828
|
+
|
16829
|
+
def to_map(self):
|
16830
|
+
_map = super().to_map()
|
16831
|
+
if _map is not None:
|
16832
|
+
return _map
|
16833
|
+
|
16834
|
+
result = dict()
|
16835
|
+
if self.headers is not None:
|
16836
|
+
result['headers'] = self.headers
|
16837
|
+
if self.status_code is not None:
|
16838
|
+
result['statusCode'] = self.status_code
|
16839
|
+
if self.body is not None:
|
16840
|
+
result['body'] = self.body.to_map()
|
16841
|
+
return result
|
16842
|
+
|
16843
|
+
def from_map(self, m: dict = None):
|
16844
|
+
m = m or dict()
|
16845
|
+
if m.get('headers') is not None:
|
16846
|
+
self.headers = m.get('headers')
|
16847
|
+
if m.get('statusCode') is not None:
|
16848
|
+
self.status_code = m.get('statusCode')
|
16849
|
+
if m.get('body') is not None:
|
16850
|
+
temp_model = ListMaxComputeExportsResponseBody()
|
16851
|
+
self.body = temp_model.from_map(m['body'])
|
16852
|
+
return self
|
16853
|
+
|
16854
|
+
|
15524
16855
|
class ListMetricStoresRequest(TeaModel):
|
15525
16856
|
def __init__(
|
15526
16857
|
self,
|
@@ -15662,6 +16993,7 @@ class ListOSSExportsRequest(TeaModel):
|
|
15662
16993
|
):
|
15663
16994
|
self.logstore = logstore
|
15664
16995
|
self.offset = offset
|
16996
|
+
# The number of entries to return. Default value: 10.
|
15665
16997
|
self.size = size
|
15666
16998
|
|
15667
16999
|
def validate(self):
|
@@ -15789,6 +17121,7 @@ class ListOSSHDFSExportsRequest(TeaModel):
|
|
15789
17121
|
):
|
15790
17122
|
self.logstore = logstore
|
15791
17123
|
self.offset = offset
|
17124
|
+
# The number of entries to return. Default value: 10.
|
15792
17125
|
self.size = size
|
15793
17126
|
|
15794
17127
|
def validate(self):
|
@@ -16318,6 +17651,7 @@ class ListScheduledSQLsRequest(TeaModel):
|
|
16318
17651
|
# The name of the Logstore.
|
16319
17652
|
self.logstore = logstore
|
16320
17653
|
self.offset = offset
|
17654
|
+
# The number of entries to return. Default value: 10.
|
16321
17655
|
self.size = size
|
16322
17656
|
|
16323
17657
|
def validate(self):
|
@@ -16684,8 +18018,8 @@ class ListTagResourcesRequest(TeaModel):
|
|
16684
18018
|
# * project
|
16685
18019
|
# * logstore
|
16686
18020
|
# * dashboard
|
16687
|
-
# *
|
16688
|
-
# *
|
18021
|
+
# * machinegroup
|
18022
|
+
# * logtailconfig
|
16689
18023
|
#
|
16690
18024
|
# This parameter is required.
|
16691
18025
|
self.resource_type = resource_type
|
@@ -16744,8 +18078,8 @@ class ListTagResourcesShrinkRequest(TeaModel):
|
|
16744
18078
|
# * project
|
16745
18079
|
# * logstore
|
16746
18080
|
# * dashboard
|
16747
|
-
# *
|
16748
|
-
# *
|
18081
|
+
# * machinegroup
|
18082
|
+
# * logtailconfig
|
16749
18083
|
#
|
16750
18084
|
# This parameter is required.
|
16751
18085
|
self.resource_type = resource_type
|
@@ -17194,6 +18528,87 @@ class PutAnnotationDataResponse(TeaModel):
|
|
17194
18528
|
return self
|
17195
18529
|
|
17196
18530
|
|
18531
|
+
class PutIngestProcessorRequest(TeaModel):
|
18532
|
+
def __init__(
|
18533
|
+
self,
|
18534
|
+
configuration: IngestProcessorConfiguration = None,
|
18535
|
+
description: str = None,
|
18536
|
+
display_name: str = None,
|
18537
|
+
):
|
18538
|
+
# The configuration of the ingest processor.
|
18539
|
+
#
|
18540
|
+
# This parameter is required.
|
18541
|
+
self.configuration = configuration
|
18542
|
+
# The description of the ingest processor.
|
18543
|
+
self.description = description
|
18544
|
+
# The display name of the ingest processor.
|
18545
|
+
#
|
18546
|
+
# This parameter is required.
|
18547
|
+
self.display_name = display_name
|
18548
|
+
|
18549
|
+
def validate(self):
|
18550
|
+
if self.configuration:
|
18551
|
+
self.configuration.validate()
|
18552
|
+
|
18553
|
+
def to_map(self):
|
18554
|
+
_map = super().to_map()
|
18555
|
+
if _map is not None:
|
18556
|
+
return _map
|
18557
|
+
|
18558
|
+
result = dict()
|
18559
|
+
if self.configuration is not None:
|
18560
|
+
result['configuration'] = self.configuration.to_map()
|
18561
|
+
if self.description is not None:
|
18562
|
+
result['description'] = self.description
|
18563
|
+
if self.display_name is not None:
|
18564
|
+
result['displayName'] = self.display_name
|
18565
|
+
return result
|
18566
|
+
|
18567
|
+
def from_map(self, m: dict = None):
|
18568
|
+
m = m or dict()
|
18569
|
+
if m.get('configuration') is not None:
|
18570
|
+
temp_model = IngestProcessorConfiguration()
|
18571
|
+
self.configuration = temp_model.from_map(m['configuration'])
|
18572
|
+
if m.get('description') is not None:
|
18573
|
+
self.description = m.get('description')
|
18574
|
+
if m.get('displayName') is not None:
|
18575
|
+
self.display_name = m.get('displayName')
|
18576
|
+
return self
|
18577
|
+
|
18578
|
+
|
18579
|
+
class PutIngestProcessorResponse(TeaModel):
|
18580
|
+
def __init__(
|
18581
|
+
self,
|
18582
|
+
headers: Dict[str, str] = None,
|
18583
|
+
status_code: int = None,
|
18584
|
+
):
|
18585
|
+
self.headers = headers
|
18586
|
+
self.status_code = status_code
|
18587
|
+
|
18588
|
+
def validate(self):
|
18589
|
+
pass
|
18590
|
+
|
18591
|
+
def to_map(self):
|
18592
|
+
_map = super().to_map()
|
18593
|
+
if _map is not None:
|
18594
|
+
return _map
|
18595
|
+
|
18596
|
+
result = dict()
|
18597
|
+
if self.headers is not None:
|
18598
|
+
result['headers'] = self.headers
|
18599
|
+
if self.status_code is not None:
|
18600
|
+
result['statusCode'] = self.status_code
|
18601
|
+
return result
|
18602
|
+
|
18603
|
+
def from_map(self, m: dict = None):
|
18604
|
+
m = m or dict()
|
18605
|
+
if m.get('headers') is not None:
|
18606
|
+
self.headers = m.get('headers')
|
18607
|
+
if m.get('statusCode') is not None:
|
18608
|
+
self.status_code = m.get('statusCode')
|
18609
|
+
return self
|
18610
|
+
|
18611
|
+
|
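A sketch of building the new PutIngestProcessorRequest added above; IngestProcessorConfiguration is defined elsewhere in models.py, so it is constructed empty here purely to satisfy the required field, and the display name and description are samples.

from alibabacloud_sls20201230.models import (
    IngestProcessorConfiguration,
    PutIngestProcessorRequest,
)

req = PutIngestProcessorRequest(
    configuration=IngestProcessorConfiguration(),  # required; fill in for a real processor
    display_name='demo processor',                 # required; sample value
    description='sample description',
)
print(sorted(req.to_map().keys()))   # ['configuration', 'description', 'displayName']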
17197
18612
|
class PutLogsHeaders(TeaModel):
|
17198
18613
|
def __init__(
|
17199
18614
|
self,
|
@@ -17509,6 +18924,9 @@ class RefreshTokenRequest(TeaModel):
|
|
17509
18924
|
access_token_expiration_time: int = None,
|
17510
18925
|
ticket: str = None,
|
17511
18926
|
):
|
18927
|
+
# * The validity period of the access token. Unit: seconds. Default value: 86400, which specifies one day. Valid values: 0 to 86400.
|
18928
|
+
# * The validity period of the access token is the smaller value between accessTokenExpirationTime and expirationTime.
|
18929
|
+
# * If you use a Security Token Service (STS) token to call this operation, the validity period of the access token is the smallest value among accessTokenExpirationTime, expirationTime, and the validity period of the STS token.
|
17512
18930
|
self.access_token_expiration_time = access_token_expiration_time
|
17513
18931
|
# The ticket that is used for logon-free access.
|
17514
18932
|
self.ticket = ticket
|
@@ -17673,22 +19091,97 @@ class SplitShardRequest(TeaModel):
|
|
17673
19091
|
return self
|
17674
19092
|
|
17675
19093
|
|
17676
|
-
class SplitShardResponse(TeaModel):
|
19094
|
+
class SplitShardResponse(TeaModel):
|
19095
|
+
def __init__(
|
19096
|
+
self,
|
19097
|
+
headers: Dict[str, str] = None,
|
19098
|
+
status_code: int = None,
|
19099
|
+
body: List[Shard] = None,
|
19100
|
+
):
|
19101
|
+
self.headers = headers
|
19102
|
+
self.status_code = status_code
|
19103
|
+
self.body = body
|
19104
|
+
|
19105
|
+
def validate(self):
|
19106
|
+
if self.body:
|
19107
|
+
for k in self.body:
|
19108
|
+
if k:
|
19109
|
+
k.validate()
|
19110
|
+
|
19111
|
+
def to_map(self):
|
19112
|
+
_map = super().to_map()
|
19113
|
+
if _map is not None:
|
19114
|
+
return _map
|
19115
|
+
|
19116
|
+
result = dict()
|
19117
|
+
if self.headers is not None:
|
19118
|
+
result['headers'] = self.headers
|
19119
|
+
if self.status_code is not None:
|
19120
|
+
result['statusCode'] = self.status_code
|
19121
|
+
result['body'] = []
|
19122
|
+
if self.body is not None:
|
19123
|
+
for k in self.body:
|
19124
|
+
result['body'].append(k.to_map() if k else None)
|
19125
|
+
return result
|
19126
|
+
|
19127
|
+
def from_map(self, m: dict = None):
|
19128
|
+
m = m or dict()
|
19129
|
+
if m.get('headers') is not None:
|
19130
|
+
self.headers = m.get('headers')
|
19131
|
+
if m.get('statusCode') is not None:
|
19132
|
+
self.status_code = m.get('statusCode')
|
19133
|
+
self.body = []
|
19134
|
+
if m.get('body') is not None:
|
19135
|
+
for k in m.get('body'):
|
19136
|
+
temp_model = Shard()
|
19137
|
+
self.body.append(temp_model.from_map(k))
|
19138
|
+
return self
|
19139
|
+
|
19140
|
+
|
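A sketch of how the restored SplitShardResponse parses a list-typed body into Shard models; Shard is defined elsewhere in models.py, and the shardID key used here is an assumed field name for illustration.

from alibabacloud_sls20201230.models import SplitShardResponse

resp = SplitShardResponse().from_map({
    'statusCode': 200,
    'body': [{'shardID': 0}, {'shardID': 1}],   # assumed Shard field name
})
print(resp.status_code)   # 200
print(len(resp.body))     # 2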
19141
|
+
class StartETLResponse(TeaModel):
|
19142
|
+
def __init__(
|
19143
|
+
self,
|
19144
|
+
headers: Dict[str, str] = None,
|
19145
|
+
status_code: int = None,
|
19146
|
+
):
|
19147
|
+
self.headers = headers
|
19148
|
+
self.status_code = status_code
|
19149
|
+
|
19150
|
+
def validate(self):
|
19151
|
+
pass
|
19152
|
+
|
19153
|
+
def to_map(self):
|
19154
|
+
_map = super().to_map()
|
19155
|
+
if _map is not None:
|
19156
|
+
return _map
|
19157
|
+
|
19158
|
+
result = dict()
|
19159
|
+
if self.headers is not None:
|
19160
|
+
result['headers'] = self.headers
|
19161
|
+
if self.status_code is not None:
|
19162
|
+
result['statusCode'] = self.status_code
|
19163
|
+
return result
|
19164
|
+
|
19165
|
+
def from_map(self, m: dict = None):
|
19166
|
+
m = m or dict()
|
19167
|
+
if m.get('headers') is not None:
|
19168
|
+
self.headers = m.get('headers')
|
19169
|
+
if m.get('statusCode') is not None:
|
19170
|
+
self.status_code = m.get('statusCode')
|
19171
|
+
return self
|
19172
|
+
|
19173
|
+
|
19174
|
+
class StartMaxComputeExportResponse(TeaModel):
|
17677
19175
|
def __init__(
|
17678
19176
|
self,
|
17679
19177
|
headers: Dict[str, str] = None,
|
17680
19178
|
status_code: int = None,
|
17681
|
-
body: List[Shard] = None,
|
17682
19179
|
):
|
17683
19180
|
self.headers = headers
|
17684
19181
|
self.status_code = status_code
|
17685
|
-
self.body = body
|
17686
19182
|
|
17687
19183
|
def validate(self):
|
17688
|
-
|
17689
|
-
for k in self.body:
|
17690
|
-
if k:
|
17691
|
-
k.validate()
|
19184
|
+
pass
|
17692
19185
|
|
17693
19186
|
def to_map(self):
|
17694
19187
|
_map = super().to_map()
|
@@ -17700,10 +19193,6 @@ class SplitShardResponse(TeaModel):
|
|
17700
19193
|
result['headers'] = self.headers
|
17701
19194
|
if self.status_code is not None:
|
17702
19195
|
result['statusCode'] = self.status_code
|
17703
|
-
result['body'] = []
|
17704
|
-
if self.body is not None:
|
17705
|
-
for k in self.body:
|
17706
|
-
result['body'].append(k.to_map() if k else None)
|
17707
19196
|
return result
|
17708
19197
|
|
17709
19198
|
def from_map(self, m: dict = None):
|
@@ -17712,15 +19201,10 @@ class SplitShardResponse(TeaModel):
|
|
17712
19201
|
self.headers = m.get('headers')
|
17713
19202
|
if m.get('statusCode') is not None:
|
17714
19203
|
self.status_code = m.get('statusCode')
|
17715
|
-
self.body = []
|
17716
|
-
if m.get('body') is not None:
|
17717
|
-
for k in m.get('body'):
|
17718
|
-
temp_model = Shard()
|
17719
|
-
self.body.append(temp_model.from_map(k))
|
17720
19204
|
return self
|
17721
19205
|
|
17722
19206
|
|
17723
|
-
class
|
19207
|
+
class StartOSSExportResponse(TeaModel):
|
17724
19208
|
def __init__(
|
17725
19209
|
self,
|
17726
19210
|
headers: Dict[str, str] = None,
|
@@ -17753,7 +19237,7 @@ class StartETLResponse(TeaModel):
|
|
17753
19237
|
return self
|
17754
19238
|
|
17755
19239
|
|
17756
|
-
class
|
19240
|
+
class StartOSSHDFSExportResponse(TeaModel):
|
17757
19241
|
def __init__(
|
17758
19242
|
self,
|
17759
19243
|
headers: Dict[str, str] = None,
|
@@ -17786,7 +19270,7 @@ class StartOSSExportResponse(TeaModel):
|
|
17786
19270
|
return self
|
17787
19271
|
|
17788
19272
|
|
17789
|
-
class
|
19273
|
+
class StartOSSIngestionResponse(TeaModel):
|
17790
19274
|
def __init__(
|
17791
19275
|
self,
|
17792
19276
|
headers: Dict[str, str] = None,
|
@@ -17819,7 +19303,7 @@ class StartOSSHDFSExportResponse(TeaModel):
|
|
17819
19303
|
return self
|
17820
19304
|
|
17821
19305
|
|
17822
|
-
class
|
19306
|
+
class StopETLResponse(TeaModel):
|
17823
19307
|
def __init__(
|
17824
19308
|
self,
|
17825
19309
|
headers: Dict[str, str] = None,
|
@@ -17852,7 +19336,7 @@ class StartOSSIngestionResponse(TeaModel):
|
|
17852
19336
|
return self
|
17853
19337
|
|
17854
19338
|
|
17855
|
-
class
|
19339
|
+
class StopMaxComputeExportResponse(TeaModel):
|
17856
19340
|
def __init__(
|
17857
19341
|
self,
|
17858
19342
|
headers: Dict[str, str] = None,
|
@@ -18585,7 +20069,7 @@ class UpdateConsumerGroupRequest(TeaModel):
|
|
18585
20069
|
# * true: If a shard is split, the data in the original shard is consumed first. Then, the data in the new shards is consumed at the same time. If shards are merged, the data in the original shards is consumed first. Then, the data in the new shard is consumed.
|
18586
20070
|
# * false: The data in all shards is consumed at the same time. If a new shard is generated after a shard is split or shards are merged, the data in the new shard is immediately consumed.
|
18587
20071
|
self.order = order
|
18588
|
-
# The timeout period. If Simple Log Service does not receive heartbeats from a consumer within the timeout period, Simple Log Service deletes the consumer. Unit: seconds
|
20072
|
+
# The timeout period. If Simple Log Service does not receive heartbeats from a consumer within the timeout period, Simple Log Service deletes the consumer. Unit: seconds.
|
18589
20073
|
self.timeout = timeout
|
18590
20074
|
|
18591
20075
|
def validate(self):
|
@@ -19057,8 +20541,11 @@ class UpdateLogStoreEncryptionRequestUserCmkInfo(TeaModel):
|
|
19057
20541
|
region_id: str = None,
|
19058
20542
|
role_arn: str = None,
|
19059
20543
|
):
|
20544
|
+
# The ID of the CMK to which the BYOK key belongs. You can create a CMK in KMS. The CMK must be in the same region as the endpoint of Simple Log Service.
|
19060
20545
|
self.key_id = key_id
|
20546
|
+
# The region ID. Example: cn-hangzhou.
|
19061
20547
|
self.region_id = region_id
|
20548
|
+
# The Alibaba Cloud Resource Name (ARN) of the Resource Access Management (RAM) role. The value is in the acs:ram::12344\\*\\*\\*:role/xxxxx format. To use a BYOK key to encrypt logs, you must create a RAM role and grant the AliyunKMSReadOnlyAccess and AliyunKMSCryptoUserAccess permissions to the RAM role. You must grant the API caller the PassRole permission on the RAM role.
|
19062
20549
|
self.role_arn = role_arn
|
19063
20550
|
|
19064
20551
|
def validate(self):
|
@@ -19096,9 +20583,13 @@ class UpdateLogStoreEncryptionRequest(TeaModel):
|
|
19096
20583
|
encrypt_type: str = None,
|
19097
20584
|
user_cmk_info: UpdateLogStoreEncryptionRequestUserCmkInfo = None,
|
19098
20585
|
):
|
20586
|
+
# Specifies whether to enable the encryption feature. After you update the encryption configuration of the Logstore, you can modify only the enable parameter in subsequent update requests. You cannot modify the encryptType or userCmkInfo parameters.
|
20587
|
+
#
|
19099
20588
|
# This parameter is required.
|
19100
20589
|
self.enable = enable
|
20590
|
+
# The encryption algorithm. Valid values: default, m4, sm4_ecb, sm4_cbc, sm4_gcm, aes_ecb, aes_cbc, aes_cfb, aes_ofb, and aes_gcm.
|
19101
20591
|
self.encrypt_type = encrypt_type
|
20592
|
+
# Optional. If you use a BYOK key to encrypt logs, you must specify this parameter. If you use the service key of Simple Log Service to encrypt logs, you do not need to specify this parameter.
|
19102
20593
|
self.user_cmk_info = user_cmk_info
|
19103
20594
|
|
19104
20595
|
def validate(self):
|
@@ -19227,6 +20718,69 @@ class UpdateLogStoreMeteringModeResponse(TeaModel):
|
|
19227
20718
|
return self
|
19228
20719
|
|
19229
20720
|
|
20721
|
+
class UpdateLogStoreProcessorRequest(TeaModel):
|
20722
|
+
def __init__(
|
20723
|
+
self,
|
20724
|
+
processor_name: str = None,
|
20725
|
+
):
|
20726
|
+
# The identifier of the ingest processor.
|
20727
|
+
#
|
20728
|
+
# This parameter is required.
|
20729
|
+
self.processor_name = processor_name
|
20730
|
+
|
20731
|
+
def validate(self):
|
20732
|
+
pass
|
20733
|
+
|
20734
|
+
def to_map(self):
|
20735
|
+
_map = super().to_map()
|
20736
|
+
if _map is not None:
|
20737
|
+
return _map
|
20738
|
+
|
20739
|
+
result = dict()
|
20740
|
+
if self.processor_name is not None:
|
20741
|
+
result['processorName'] = self.processor_name
|
20742
|
+
return result
|
20743
|
+
|
20744
|
+
def from_map(self, m: dict = None):
|
20745
|
+
m = m or dict()
|
20746
|
+
if m.get('processorName') is not None:
|
20747
|
+
self.processor_name = m.get('processorName')
|
20748
|
+
return self
|
20749
|
+
|
20750
|
+
|
20751
|
+
class UpdateLogStoreProcessorResponse(TeaModel):
|
20752
|
+
def __init__(
|
20753
|
+
self,
|
20754
|
+
headers: Dict[str, str] = None,
|
20755
|
+
status_code: int = None,
|
20756
|
+
):
|
20757
|
+
self.headers = headers
|
20758
|
+
self.status_code = status_code
|
20759
|
+
|
20760
|
+
def validate(self):
|
20761
|
+
pass
|
20762
|
+
|
20763
|
+
def to_map(self):
|
20764
|
+
_map = super().to_map()
|
20765
|
+
if _map is not None:
|
20766
|
+
return _map
|
20767
|
+
|
20768
|
+
result = dict()
|
20769
|
+
if self.headers is not None:
|
20770
|
+
result['headers'] = self.headers
|
20771
|
+
if self.status_code is not None:
|
20772
|
+
result['statusCode'] = self.status_code
|
20773
|
+
return result
|
20774
|
+
|
20775
|
+
def from_map(self, m: dict = None):
|
20776
|
+
m = m or dict()
|
20777
|
+
if m.get('headers') is not None:
|
20778
|
+
self.headers = m.get('headers')
|
20779
|
+
if m.get('statusCode') is not None:
|
20780
|
+
self.status_code = m.get('statusCode')
|
20781
|
+
return self
|
20782
|
+
|
20783
|
+
|
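A usage sketch for the new UpdateLogStoreProcessorRequest; the processor identifier is a sample value.

from alibabacloud_sls20201230.models import UpdateLogStoreProcessorRequest

req = UpdateLogStoreProcessorRequest(processor_name='demo-processor')
print(req.to_map())   # {'processorName': 'demo-processor'}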
19230
20784
|
class UpdateLoggingRequestLoggingDetails(TeaModel):
|
19231
20785
|
def __init__(
|
19232
20786
|
self,
|
@@ -19378,15 +20932,11 @@ class UpdateLogtailPipelineConfigRequest(TeaModel):
|
|
19378
20932
|
self.config_name = config_name
|
19379
20933
|
# The output plug-ins.
|
19380
20934
|
#
|
19381
|
-
# > You can
|
20935
|
+
# > You can configure only one output plug-in.
|
19382
20936
|
#
|
19383
20937
|
# This parameter is required.
|
19384
20938
|
self.flushers = flushers
|
19385
20939
|
# The global settings.
|
19386
|
-
#
|
19387
|
-
# **\
|
19388
|
-
#
|
19389
|
-
# ****\
|
19390
20940
|
self.global_ = global_
|
19391
20941
|
# The input plug-ins.
|
19392
20942
|
#
|
@@ -19398,13 +20948,13 @@ class UpdateLogtailPipelineConfigRequest(TeaModel):
|
|
19398
20948
|
self.log_sample = log_sample
|
19399
20949
|
# The processing plug-ins.
|
19400
20950
|
#
|
19401
|
-
# > Logtail
|
20951
|
+
# > Logtail plug-ins for data processing are classified into native plug-ins and extended plug-ins. For more information, see [Overview of Logtail plug-ins for data processing](https://help.aliyun.com/document_detail/64957.html).
|
19402
20952
|
#
|
19403
20953
|
# >
|
19404
20954
|
#
|
19405
20955
|
# * You can use native plug-ins only to collect text logs.
|
19406
20956
|
#
|
19407
|
-
# * You cannot add native plug-ins and extended plug-ins at
|
20957
|
+
# * You cannot add native plug-ins and extended plug-ins at the same time.
|
19408
20958
|
#
|
19409
20959
|
# * When you add native plug-ins, take note of the following items:
|
19410
20960
|
#
|
@@ -19697,16 +21247,101 @@ class UpdateMachineGroupMachineResponse(TeaModel):
|
|
19697
21247
|
return self
|
19698
21248
|
|
19699
21249
|
|
21250
|
+
class UpdateMaxComputeExportRequest(TeaModel):
|
21251
|
+
def __init__(
|
21252
|
+
self,
|
21253
|
+
configuration: MaxComputeExportConfiguration = None,
|
21254
|
+
description: str = None,
|
21255
|
+
display_name: str = None,
|
21256
|
+
):
|
21257
|
+
# The setting of the MaxCompute data shipping job.
|
21258
|
+
#
|
21259
|
+
# This parameter is required.
|
21260
|
+
self.configuration = configuration
|
21261
|
+
# The description of the MaxCompute data shipping job.
|
21262
|
+
self.description = description
|
21263
|
+
# The display name of the MaxCompute data shipping job.
|
21264
|
+
#
|
21265
|
+
# This parameter is required.
|
21266
|
+
self.display_name = display_name
|
21267
|
+
|
21268
|
+
def validate(self):
|
21269
|
+
if self.configuration:
|
21270
|
+
self.configuration.validate()
|
21271
|
+
|
21272
|
+
def to_map(self):
|
21273
|
+
_map = super().to_map()
|
21274
|
+
if _map is not None:
|
21275
|
+
return _map
|
21276
|
+
|
21277
|
+
result = dict()
|
21278
|
+
if self.configuration is not None:
|
21279
|
+
result['configuration'] = self.configuration.to_map()
|
21280
|
+
if self.description is not None:
|
21281
|
+
result['description'] = self.description
|
21282
|
+
if self.display_name is not None:
|
21283
|
+
result['displayName'] = self.display_name
|
21284
|
+
return result
|
21285
|
+
|
21286
|
+
def from_map(self, m: dict = None):
|
21287
|
+
m = m or dict()
|
21288
|
+
if m.get('configuration') is not None:
|
21289
|
+
temp_model = MaxComputeExportConfiguration()
|
21290
|
+
self.configuration = temp_model.from_map(m['configuration'])
|
21291
|
+
if m.get('description') is not None:
|
21292
|
+
self.description = m.get('description')
|
21293
|
+
if m.get('displayName') is not None:
|
21294
|
+
self.display_name = m.get('displayName')
|
21295
|
+
return self
|
21296
|
+
|
21297
|
+
|
21298
|
+
class UpdateMaxComputeExportResponse(TeaModel):
|
21299
|
+
def __init__(
|
21300
|
+
self,
|
21301
|
+
headers: Dict[str, str] = None,
|
21302
|
+
status_code: int = None,
|
21303
|
+
):
|
21304
|
+
self.headers = headers
|
21305
|
+
self.status_code = status_code
|
21306
|
+
|
21307
|
+
def validate(self):
|
21308
|
+
pass
|
21309
|
+
|
21310
|
+
def to_map(self):
|
21311
|
+
_map = super().to_map()
|
21312
|
+
if _map is not None:
|
21313
|
+
return _map
|
21314
|
+
|
21315
|
+
result = dict()
|
21316
|
+
if self.headers is not None:
|
21317
|
+
result['headers'] = self.headers
|
21318
|
+
if self.status_code is not None:
|
21319
|
+
result['statusCode'] = self.status_code
|
21320
|
+
return result
|
21321
|
+
|
21322
|
+
def from_map(self, m: dict = None):
|
21323
|
+
m = m or dict()
|
21324
|
+
if m.get('headers') is not None:
|
21325
|
+
self.headers = m.get('headers')
|
21326
|
+
if m.get('statusCode') is not None:
|
21327
|
+
self.status_code = m.get('statusCode')
|
21328
|
+
return self
|
21329
|
+
|
21330
|
+
|
19700
21331
|
class UpdateMetricStoreRequest(TeaModel):
|
19701
21332
|
def __init__(
|
19702
21333
|
self,
|
19703
21334
|
auto_split: bool = None,
|
21335
|
+
hot_ttl: int = None,
|
21336
|
+
infrequent_access_ttl: int = None,
|
19704
21337
|
max_split_shard: int = None,
|
19705
21338
|
mode: str = None,
|
19706
21339
|
ttl: int = None,
|
19707
21340
|
):
|
19708
21341
|
# Specifies whether to enable automatic sharding.
|
19709
21342
|
self.auto_split = auto_split
|
21343
|
+
self.hot_ttl = hot_ttl
|
21344
|
+
self.infrequent_access_ttl = infrequent_access_ttl
|
19710
21345
|
# The maximum number of shards into which existing shards can be automatically split. This parameter is valid only when you set the autoSplit parameter to true.
|
19711
21346
|
self.max_split_shard = max_split_shard
|
19712
21347
|
# The type of the Metricstore.
|
@@ -19725,6 +21360,10 @@ class UpdateMetricStoreRequest(TeaModel):
|
|
19725
21360
|
result = dict()
|
19726
21361
|
if self.auto_split is not None:
|
19727
21362
|
result['autoSplit'] = self.auto_split
|
21363
|
+
if self.hot_ttl is not None:
|
21364
|
+
result['hot_ttl'] = self.hot_ttl
|
21365
|
+
if self.infrequent_access_ttl is not None:
|
21366
|
+
result['infrequentAccessTTL'] = self.infrequent_access_ttl
|
19728
21367
|
if self.max_split_shard is not None:
|
19729
21368
|
result['maxSplitShard'] = self.max_split_shard
|
19730
21369
|
if self.mode is not None:
|
@@ -19737,6 +21376,10 @@ class UpdateMetricStoreRequest(TeaModel):
|
|
19737
21376
|
m = m or dict()
|
19738
21377
|
if m.get('autoSplit') is not None:
|
19739
21378
|
self.auto_split = m.get('autoSplit')
|
21379
|
+
if m.get('hot_ttl') is not None:
|
21380
|
+
self.hot_ttl = m.get('hot_ttl')
|
21381
|
+
if m.get('infrequentAccessTTL') is not None:
|
21382
|
+
self.infrequent_access_ttl = m.get('infrequentAccessTTL')
|
19740
21383
|
if m.get('maxSplitShard') is not None:
|
19741
21384
|
self.max_split_shard = m.get('maxSplitShard')
|
19742
21385
|
if m.get('mode') is not None:
|
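A sketch of the two tiered-storage fields added to UpdateMetricStoreRequest above, showing the mixed key casing ('hot_ttl' versus 'infrequentAccessTTL') they serialize to; the day counts are samples.

from alibabacloud_sls20201230.models import UpdateMetricStoreRequest

req = UpdateMetricStoreRequest(hot_ttl=7, infrequent_access_ttl=30)
print(req.to_map())   # {'hot_ttl': 7, 'infrequentAccessTTL': 30}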
@@ -19840,6 +21483,69 @@ class UpdateMetricStoreMeteringModeResponse(TeaModel):
|
|
19840
21483
|
return self
|
19841
21484
|
|
19842
21485
|
|
21486
|
+
class UpdateMetricStoreProcessorRequest(TeaModel):
|
21487
|
+
def __init__(
|
21488
|
+
self,
|
21489
|
+
processor_name: str = None,
|
21490
|
+
):
|
21491
|
+
# The identifier of the ingest processor.
|
21492
|
+
#
|
21493
|
+
# This parameter is required.
|
21494
|
+
self.processor_name = processor_name
|
21495
|
+
|
21496
|
+
def validate(self):
|
21497
|
+
pass
|
21498
|
+
|
21499
|
+
def to_map(self):
|
21500
|
+
_map = super().to_map()
|
21501
|
+
if _map is not None:
|
21502
|
+
return _map
|
21503
|
+
|
21504
|
+
result = dict()
|
21505
|
+
if self.processor_name is not None:
|
21506
|
+
result['processorName'] = self.processor_name
|
21507
|
+
return result
|
21508
|
+
|
21509
|
+
def from_map(self, m: dict = None):
|
21510
|
+
m = m or dict()
|
21511
|
+
if m.get('processorName') is not None:
|
21512
|
+
self.processor_name = m.get('processorName')
|
21513
|
+
return self
|
21514
|
+
|
21515
|
+
|
21516
|
+
class UpdateMetricStoreProcessorResponse(TeaModel):
|
21517
|
+
def __init__(
|
21518
|
+
self,
|
21519
|
+
headers: Dict[str, str] = None,
|
21520
|
+
status_code: int = None,
|
21521
|
+
):
|
21522
|
+
self.headers = headers
|
21523
|
+
self.status_code = status_code
|
21524
|
+
|
21525
|
+
def validate(self):
|
21526
|
+
pass
|
21527
|
+
|
21528
|
+
def to_map(self):
|
21529
|
+
_map = super().to_map()
|
21530
|
+
if _map is not None:
|
21531
|
+
return _map
|
21532
|
+
|
21533
|
+
result = dict()
|
21534
|
+
if self.headers is not None:
|
21535
|
+
result['headers'] = self.headers
|
21536
|
+
if self.status_code is not None:
|
21537
|
+
result['statusCode'] = self.status_code
|
21538
|
+
return result
|
21539
|
+
|
21540
|
+
def from_map(self, m: dict = None):
|
21541
|
+
m = m or dict()
|
21542
|
+
if m.get('headers') is not None:
|
21543
|
+
self.headers = m.get('headers')
|
21544
|
+
if m.get('statusCode') is not None:
|
21545
|
+
self.status_code = m.get('statusCode')
|
21546
|
+
return self
|
21547
|
+
|
21548
|
+
|
19843
21549
|
class UpdateOSSExportRequest(TeaModel):
|
19844
21550
|
def __init__(
|
19845
21551
|
self,
|
@@ -19925,10 +21631,14 @@ class UpdateOSSHDFSExportRequest(TeaModel):
|
|
19925
21631
|
display_name: str = None,
|
19926
21632
|
):
|
19927
21633
|
# The configuration details of the job.
|
21634
|
+
#
|
21635
|
+
# This parameter is required.
|
19928
21636
|
self.configuration = configuration
|
19929
21637
|
# The description of the job.
|
19930
21638
|
self.description = description
|
19931
21639
|
# The display name of the job.
|
21640
|
+
#
|
21641
|
+
# This parameter is required.
|
19932
21642
|
self.display_name = display_name
|
19933
21643
|
|
19934
21644
|
def validate(self):
|
@@ -20095,7 +21805,7 @@ class UpdateOssExternalStoreRequestParameterColumns(TeaModel):
         #
         # This parameter is required.
         self.name = name
-        # The type of the field.
+        # The data type of the field.
         #
         # This parameter is required.
         self.type = type
@@ -20134,11 +21844,11 @@ class UpdateOssExternalStoreRequestParameter(TeaModel):
         endpoint: str = None,
         objects: List[str] = None,
     ):
-        # The AccessKey ID
+        # The AccessKey ID.
         #
         # This parameter is required.
         self.accessid = accessid
-        # The AccessKey secret
+        # The AccessKey secret.
         #
         # This parameter is required.
         self.accesskey = accesskey
@@ -20146,7 +21856,7 @@ class UpdateOssExternalStoreRequestParameter(TeaModel):
         #
         # This parameter is required.
         self.bucket = bucket
-        # The
+        # The associated fields.
         #
         # This parameter is required.
         self.columns = columns
@@ -20154,7 +21864,7 @@ class UpdateOssExternalStoreRequestParameter(TeaModel):
         #
         # This parameter is required.
         self.endpoint = endpoint
-        # The names of the OSS objects
+        # The names of the associated OSS objects.
         #
         # This parameter is required.
         self.objects = objects
@@ -20294,11 +22004,13 @@ class UpdateProjectRequest(TeaModel):
     def __init__(
         self,
         description: str = None,
+        recycle_bin_enabled: bool = None,
     ):
         # The description of the project. The default value is an empty string.
         #
         # This parameter is required.
         self.description = description
+        self.recycle_bin_enabled = recycle_bin_enabled
 
     def validate(self):
         pass
@@ -20311,12 +22023,16 @@ class UpdateProjectRequest(TeaModel):
         result = dict()
         if self.description is not None:
             result['description'] = self.description
+        if self.recycle_bin_enabled is not None:
+            result['recycleBinEnabled'] = self.recycle_bin_enabled
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
         if m.get('description') is not None:
             self.description = m.get('description')
+        if m.get('recycleBinEnabled') is not None:
+            self.recycle_bin_enabled = m.get('recycleBinEnabled')
         return self
 
 
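A minimal sketch of the new recycle-bin flag on UpdateProjectRequest, assuming only what the two hunks above show (the wire key is recycleBinEnabled); the description text is a placeholder.

    from alibabacloud_sls20201230 import models as sls_models

    # Placeholder description; recycle_bin_enabled serializes to 'recycleBinEnabled'.
    req = sls_models.UpdateProjectRequest(description='demo project', recycle_bin_enabled=True)
    print(req.to_map())  # {'description': 'demo project', 'recycleBinEnabled': True}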
@@ -20456,7 +22172,7 @@ class UpdateRdsExternalStoreRequest(TeaModel):
         parameter: UpdateRdsExternalStoreRequestParameter = None,
         store_type: str = None,
     ):
-        # The name of the
+        # The name of the ExternalStore.
         #
         # This parameter is required.
         self.external_store_name = external_store_name
@@ -20889,9 +22605,13 @@ class UpsertCollectionPolicyRequestCentralizeConfig(TeaModel):
         dest_region: str = None,
         dest_ttl: int = None,
     ):
+        # The destination logstore for centralized storage. Make sure that the region of the destination logstore is consistent with the region specified by destRegion and the destination logstore belongs to the destination project specified by destProject.
         self.dest_logstore = dest_logstore
+        # The destination project for centralized storage. Make sure that the region of the destination project is consistent with the region specified by destRegion.
         self.dest_project = dest_project
+        # The destination region for centralized storage.
         self.dest_region = dest_region
+        # The data retention period for centralized storage. Unit: days. This parameter takes effect only when you use an existing logstore for centralized storage.
         self.dest_ttl = dest_ttl
 
     def validate(self):
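A minimal sketch of the centralized-storage configuration documented above; the region, project, and logstore names are placeholders.

    from alibabacloud_sls20201230 import models as sls_models

    # destRegion must match the region of the destination project and logstore (all placeholders here).
    centralize = sls_models.UpsertCollectionPolicyRequestCentralizeConfig(
        dest_region='cn-hangzhou',
        dest_project='central-logs-project',
        dest_logstore='central-logstore',
        dest_ttl=30,  # days; only takes effect when an existing logstore is reused
    )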
@@ -20931,6 +22651,7 @@ class UpsertCollectionPolicyRequestDataConfig(TeaModel):
         self,
         data_region: str = None,
     ):
+        # The region for storing the global logs that are collected for the first time.
         self.data_region = data_region
 
     def validate(self):
@@ -20961,10 +22682,15 @@ class UpsertCollectionPolicyRequestPolicyConfig(TeaModel):
         resource_mode: str = None,
         resource_tags: Dict[str, Any] = None,
     ):
+        # The IDs of the instances. This parameter takes effect only when resourceMode is set to instanceMode. Logs are collected only from instances that use the specified IDs.
         self.instance_ids = instance_ids
+        # The regions of the instances. This parameter takes effect only when resourceMode is set to attributeMode. Wildcard characters are supported. If you leave this parameter empty, region-based filtering is not performed. The system considers that all instances are matched. If you specify a value for this parameter, logs of instances that reside in the specified regions are collected. Logs are collected from an instance only if the resource tags and region of the instance match the specified conditions.
         self.regions = regions
+        # The resource collection mode. Valid values: all, attributeMode, and instanceMode. The value all specifies that logs of all instances within your account are collected to the default logstore. The value attributeMode specifies that logs are collected based on the regions of instances and resource tags. The value instanceMode specifies that logs are collected based on instance IDs.
+        #
         # This parameter is required.
         self.resource_mode = resource_mode
+        # The resource tags. This parameter takes effect only when resourceMode is set to attributeMode. If you leave this parameter empty, resource tag-based filtering is not performed. The system considers that all instances are matched. If you specify a value for this parameter, logs of instances that use the specified resource tags are collected. Logs are collected from an instance only if the resource tags and region of the instance match the specified conditions.
         self.resource_tags = resource_tags
 
     def validate(self):
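A minimal sketch of the attributeMode behavior described in the comments above; the region and tag values are placeholders.

    from alibabacloud_sls20201230 import models as sls_models

    # attributeMode: filter instances by region and resource tags (both placeholders).
    policy_config = sls_models.UpsertCollectionPolicyRequestPolicyConfig(
        resource_mode='attributeMode',
        regions=['cn-hangzhou'],
        resource_tags={'env': 'prod'},
    )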
@@ -21005,7 +22731,9 @@ class UpsertCollectionPolicyRequestResourceDirectory(TeaModel):
         account_group_type: str = None,
         members: List[str] = None,
     ):
+        # The mode of the resource directory. Valid values: all and custom.
         self.account_group_type = account_group_type
+        # The members. If accountGroupType is set to custom, the members are returned.
         self.members = members
 
     def validate(self):
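A minimal sketch of the resource-directory settings above; the member account ID is a placeholder.

    from alibabacloud_sls20201230 import models as sls_models

    # 'custom' restricts collection to the listed member accounts (placeholder ID).
    resource_directory = sls_models.UpsertCollectionPolicyRequestResourceDirectory(
        account_group_type='custom',
        members=['123456789012****'],
    )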
@@ -21045,19 +22773,37 @@ class UpsertCollectionPolicyRequest(TeaModel):
         product_code: str = None,
         resource_directory: UpsertCollectionPolicyRequestResourceDirectory = None,
     ):
+        # The configurations of centralized storage.
         self.centralize_config = centralize_config
+        # Specifies whether to enable centralized storage. Default value: false.
         self.centralize_enabled = centralize_enabled
+        # The code of the log type.
+        #
         # This parameter is required.
         self.data_code = data_code
+        # The data configurations. The configuration is returned only for global logs. For example, if productCode is set to sls, the configuration is returned.
         self.data_config = data_config
+        # Specifies whether to enable the policy.
+        #
         # This parameter is required.
         self.enabled = enabled
+        # The configurations of the policy.
+        #
         # This parameter is required.
         self.policy_config = policy_config
+        # The name must meet the following requirements:
+        #
+        # * The name can contain only lowercase letters, digits, hyphens (-), and underscores (_).
+        # * The name must start with a letter.
+        # * The name must be 3 to 63 characters in length.
+        #
         # This parameter is required.
         self.policy_name = policy_name
+        # The code of the service.
+        #
         # This parameter is required.
         self.product_code = product_code
+        # The configurations of the resource directory. The account must have activated the resource directory and be a management account or a delegated administrator of the resource directory.
         self.resource_directory = resource_directory
 
     def validate(self):
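Putting it together, a minimal sketch of an UpsertCollectionPolicyRequest built from the fields documented above; the policy name and dataCode are placeholders, and the nested configs could be the ones sketched earlier.

    from alibabacloud_sls20201230 import models as sls_models

    # All values are placeholders chosen to satisfy the documented constraints.
    request = sls_models.UpsertCollectionPolicyRequest(
        policy_name='demo_policy_01',  # lowercase letters, digits, '-', '_'; starts with a letter; 3-63 chars
        product_code='sls',            # the service code
        data_code='demo_log',          # placeholder log-type code
        enabled=True,
        centralize_enabled=False,
        policy_config=sls_models.UpsertCollectionPolicyRequestPolicyConfig(resource_mode='all'),
    )
    print(request.to_map())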