alibabacloud-sls20201230 5.6.0__py3-none-any.whl → 5.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibabacloud_sls20201230/__init__.py +1 -1
- alibabacloud_sls20201230/client.py +2273 -238
- alibabacloud_sls20201230/models.py +1895 -155
- {alibabacloud_sls20201230-5.6.0.dist-info → alibabacloud_sls20201230-5.7.0.dist-info}/METADATA +2 -2
- alibabacloud_sls20201230-5.7.0.dist-info/RECORD +8 -0
- alibabacloud_sls20201230-5.6.0.dist-info/RECORD +0 -8
- {alibabacloud_sls20201230-5.6.0.dist-info → alibabacloud_sls20201230-5.7.0.dist-info}/LICENSE +0 -0
- {alibabacloud_sls20201230-5.6.0.dist-info → alibabacloud_sls20201230-5.7.0.dist-info}/WHEEL +0 -0
- {alibabacloud_sls20201230-5.6.0.dist-info → alibabacloud_sls20201230-5.7.0.dist-info}/top_level.txt +0 -0
alibabacloud_sls20201230/models.py

@@ -973,6 +973,176 @@ class ConsumerGroup(TeaModel):
         return self
 
 
+class CopilotActionParameters(TeaModel):
+    def __init__(
+        self,
+        name: str = None,
+        prompt: str = None,
+        required: str = None,
+        type: str = None,
+    ):
+        self.name = name
+        self.prompt = prompt
+        self.required = required
+        self.type = type
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.name is not None:
+            result['name'] = self.name
+        if self.prompt is not None:
+            result['prompt'] = self.prompt
+        if self.required is not None:
+            result['required'] = self.required
+        if self.type is not None:
+            result['type'] = self.type
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('prompt') is not None:
+            self.prompt = m.get('prompt')
+        if m.get('required') is not None:
+            self.required = m.get('required')
+        if m.get('type') is not None:
+            self.type = m.get('type')
+        return self
+
+
+class CopilotActionQueryTemplateParameters(TeaModel):
+    def __init__(
+        self,
+        name: str = None,
+        prompt: str = None,
+        required: str = None,
+        type: str = None,
+    ):
+        self.name = name
+        self.prompt = prompt
+        self.required = required
+        self.type = type
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.name is not None:
+            result['name'] = self.name
+        if self.prompt is not None:
+            result['prompt'] = self.prompt
+        if self.required is not None:
+            result['required'] = self.required
+        if self.type is not None:
+            result['type'] = self.type
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('prompt') is not None:
+            self.prompt = m.get('prompt')
+        if m.get('required') is not None:
+            self.required = m.get('required')
+        if m.get('type') is not None:
+            self.type = m.get('type')
+        return self
+
+
+class CopilotAction(TeaModel):
+    def __init__(
+        self,
+        action: str = None,
+        description: str = None,
+        name: str = None,
+        parameters: List[CopilotActionParameters] = None,
+        query_template: str = None,
+        query_template_parameters: List[CopilotActionQueryTemplateParameters] = None,
+        scene: str = None,
+    ):
+        self.action = action
+        self.description = description
+        self.name = name
+        self.parameters = parameters
+        self.query_template = query_template
+        self.query_template_parameters = query_template_parameters
+        self.scene = scene
+
+    def validate(self):
+        if self.parameters:
+            for k in self.parameters:
+                if k:
+                    k.validate()
+        if self.query_template_parameters:
+            for k in self.query_template_parameters:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.action is not None:
+            result['action'] = self.action
+        if self.description is not None:
+            result['description'] = self.description
+        if self.name is not None:
+            result['name'] = self.name
+        result['parameters'] = []
+        if self.parameters is not None:
+            for k in self.parameters:
+                result['parameters'].append(k.to_map() if k else None)
+        if self.query_template is not None:
+            result['queryTemplate'] = self.query_template
+        result['queryTemplateParameters'] = []
+        if self.query_template_parameters is not None:
+            for k in self.query_template_parameters:
+                result['queryTemplateParameters'].append(k.to_map() if k else None)
+        if self.scene is not None:
+            result['scene'] = self.scene
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('action') is not None:
+            self.action = m.get('action')
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        self.parameters = []
+        if m.get('parameters') is not None:
+            for k in m.get('parameters'):
+                temp_model = CopilotActionParameters()
+                self.parameters.append(temp_model.from_map(k))
+        if m.get('queryTemplate') is not None:
+            self.query_template = m.get('queryTemplate')
+        self.query_template_parameters = []
+        if m.get('queryTemplateParameters') is not None:
+            for k in m.get('queryTemplateParameters'):
+                temp_model = CopilotActionQueryTemplateParameters()
+                self.query_template_parameters.append(temp_model.from_map(k))
+        if m.get('scene') is not None:
+            self.scene = m.get('scene')
+        return self
+
+
 class ETLConfigurationSink(TeaModel):
     def __init__(
         self,
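For orientation, a minimal, hypothetical sketch of deserializing the CopilotAction model added in the hunk above. The class names, field names, and camelCase wire keys come from the diff; the example values and the import path are placeholders I chose, assuming the 5.7.0 wheel is installed.

```python
# Hypothetical usage sketch; the values below are illustrative, not taken from the package.
from alibabacloud_sls20201230 import models as sls_models

raw = {
    'action': 'query',                     # assumed example value
    'name': 'error-analysis',              # assumed example value
    'scene': 'logstore',                   # assumed example value
    'queryTemplate': '* and level: {level}',
    'parameters': [{'name': 'level', 'type': 'string', 'required': 'true'}],
}

# from_map() populates the model and builds nested CopilotActionParameters objects,
# exactly as the generated code in the hunk above does.
action = sls_models.CopilotAction().from_map(raw)
assert action.query_template == '* and level: {level}'
assert action.parameters[0].name == 'level'
```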
@@ -2544,7 +2714,9 @@ class MLServiceParam(TeaModel):
 class MaxComputeExportConfigurationSink(TeaModel):
     def __init__(
         self,
+        buffer_interval: str = None,
         fields: List[str] = None,
+        filter_invalid: bool = None,
         odps_access_key_id: str = None,
         odps_access_secret: str = None,
         odps_endpoint: str = None,
@@ -2554,10 +2726,13 @@ class MaxComputeExportConfigurationSink(TeaModel):
         odps_tunnel_endpoint: str = None,
         partition_column: List[str] = None,
         partition_time_format: str = None,
+        time_format_type: str = None,
         time_zone: str = None,
     ):
+        self.buffer_interval = buffer_interval
         # This parameter is required.
         self.fields = fields
+        self.filter_invalid = filter_invalid
         self.odps_access_key_id = odps_access_key_id
         self.odps_access_secret = odps_access_secret
         # This parameter is required.
@@ -2568,12 +2743,12 @@ class MaxComputeExportConfigurationSink(TeaModel):
         self.odps_rolearn = odps_rolearn
         # This parameter is required.
         self.odps_table = odps_table
-        # This parameter is required.
         self.odps_tunnel_endpoint = odps_tunnel_endpoint
         # This parameter is required.
         self.partition_column = partition_column
         # This parameter is required.
         self.partition_time_format = partition_time_format
+        self.time_format_type = time_format_type
         # This parameter is required.
         self.time_zone = time_zone
 
@@ -2586,8 +2761,12 @@ class MaxComputeExportConfigurationSink(TeaModel):
             return _map
 
         result = dict()
+        if self.buffer_interval is not None:
+            result['bufferInterval'] = self.buffer_interval
         if self.fields is not None:
             result['fields'] = self.fields
+        if self.filter_invalid is not None:
+            result['filterInvalid'] = self.filter_invalid
         if self.odps_access_key_id is not None:
             result['odpsAccessKeyId'] = self.odps_access_key_id
         if self.odps_access_secret is not None:
@@ -2606,14 +2785,20 @@ class MaxComputeExportConfigurationSink(TeaModel):
             result['partitionColumn'] = self.partition_column
         if self.partition_time_format is not None:
             result['partitionTimeFormat'] = self.partition_time_format
+        if self.time_format_type is not None:
+            result['timeFormatType'] = self.time_format_type
         if self.time_zone is not None:
             result['timeZone'] = self.time_zone
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('bufferInterval') is not None:
+            self.buffer_interval = m.get('bufferInterval')
         if m.get('fields') is not None:
             self.fields = m.get('fields')
+        if m.get('filterInvalid') is not None:
+            self.filter_invalid = m.get('filterInvalid')
         if m.get('odpsAccessKeyId') is not None:
             self.odps_access_key_id = m.get('odpsAccessKeyId')
         if m.get('odpsAccessSecret') is not None:
@@ -2632,6 +2817,8 @@ class MaxComputeExportConfigurationSink(TeaModel):
             self.partition_column = m.get('partitionColumn')
         if m.get('partitionTimeFormat') is not None:
             self.partition_time_format = m.get('partitionTimeFormat')
+        if m.get('timeFormatType') is not None:
+            self.time_format_type = m.get('timeFormatType')
         if m.get('timeZone') is not None:
             self.time_zone = m.get('timeZone')
         return self
@@ -3353,24 +3540,58 @@ class ProjectSummary(TeaModel):
         return self
 
 
-class
+class S3IngestionConfigurationSource(TeaModel):
     def __init__(
         self,
-
-
-
-
-
+        aws_access_key: str = None,
+        aws_access_key_secret: str = None,
+        aws_region: str = None,
+        aws_sqsqueue_url: str = None,
+        aws_use_sqs: bool = None,
+        bucket: str = None,
+        compression_codec: str = None,
+        encoding: str = None,
+        end_time: int = None,
+        format: Dict[str, Any] = None,
+        interval: str = None,
+        pattern: str = None,
+        prefix: str = None,
+        start_time: int = None,
+        tag_pack_id: bool = None,
+        time_field: str = None,
+        time_format: str = None,
+        time_pattern: str = None,
+        time_zone: str = None,
+        use_aws_sqsonly: bool = None,
     ):
         # This parameter is required.
-        self.
+        self.aws_access_key = aws_access_key
         # This parameter is required.
-        self.
+        self.aws_access_key_secret = aws_access_key_secret
         # This parameter is required.
-        self.
+        self.aws_region = aws_region
+        self.aws_sqsqueue_url = aws_sqsqueue_url
+        self.aws_use_sqs = aws_use_sqs
         # This parameter is required.
-        self.
-
+        self.bucket = bucket
+        # This parameter is required.
+        self.compression_codec = compression_codec
+        # This parameter is required.
+        self.encoding = encoding
+        self.end_time = end_time
+        # This parameter is required.
+        self.format = format
+        # This parameter is required.
+        self.interval = interval
+        self.pattern = pattern
+        self.prefix = prefix
+        self.start_time = start_time
+        self.tag_pack_id = tag_pack_id
+        self.time_field = time_field
+        self.time_format = time_format
+        self.time_pattern = time_pattern
+        self.time_zone = time_zone
+        self.use_aws_sqsonly = use_aws_sqsonly
 
     def validate(self):
         pass
@@ -3381,58 +3602,292 @@ class SavedSearch(TeaModel):
             return _map
 
         result = dict()
-        if self.
-            result['
-        if self.
-            result['
-        if self.
-            result['
-        if self.
-            result['
-        if self.
-            result['
+        if self.aws_access_key is not None:
+            result['awsAccessKey'] = self.aws_access_key
+        if self.aws_access_key_secret is not None:
+            result['awsAccessKeySecret'] = self.aws_access_key_secret
+        if self.aws_region is not None:
+            result['awsRegion'] = self.aws_region
+        if self.aws_sqsqueue_url is not None:
+            result['awsSQSQueueUrl'] = self.aws_sqsqueue_url
+        if self.aws_use_sqs is not None:
+            result['awsUseSQS'] = self.aws_use_sqs
+        if self.bucket is not None:
+            result['bucket'] = self.bucket
+        if self.compression_codec is not None:
+            result['compressionCodec'] = self.compression_codec
+        if self.encoding is not None:
+            result['encoding'] = self.encoding
+        if self.end_time is not None:
+            result['endTime'] = self.end_time
+        if self.format is not None:
+            result['format'] = self.format
+        if self.interval is not None:
+            result['interval'] = self.interval
+        if self.pattern is not None:
+            result['pattern'] = self.pattern
+        if self.prefix is not None:
+            result['prefix'] = self.prefix
+        if self.start_time is not None:
+            result['startTime'] = self.start_time
+        if self.tag_pack_id is not None:
+            result['tagPackId'] = self.tag_pack_id
+        if self.time_field is not None:
+            result['timeField'] = self.time_field
+        if self.time_format is not None:
+            result['timeFormat'] = self.time_format
+        if self.time_pattern is not None:
+            result['timePattern'] = self.time_pattern
+        if self.time_zone is not None:
+            result['timeZone'] = self.time_zone
+        if self.use_aws_sqsonly is not None:
+            result['useAwsSQSOnly'] = self.use_aws_sqsonly
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('
-            self.
-        if m.get('
-            self.
-        if m.get('
-            self.
-        if m.get('
-            self.
-        if m.get('
-            self.
+        if m.get('awsAccessKey') is not None:
+            self.aws_access_key = m.get('awsAccessKey')
+        if m.get('awsAccessKeySecret') is not None:
+            self.aws_access_key_secret = m.get('awsAccessKeySecret')
+        if m.get('awsRegion') is not None:
+            self.aws_region = m.get('awsRegion')
+        if m.get('awsSQSQueueUrl') is not None:
+            self.aws_sqsqueue_url = m.get('awsSQSQueueUrl')
+        if m.get('awsUseSQS') is not None:
+            self.aws_use_sqs = m.get('awsUseSQS')
+        if m.get('bucket') is not None:
+            self.bucket = m.get('bucket')
+        if m.get('compressionCodec') is not None:
+            self.compression_codec = m.get('compressionCodec')
+        if m.get('encoding') is not None:
+            self.encoding = m.get('encoding')
+        if m.get('endTime') is not None:
+            self.end_time = m.get('endTime')
+        if m.get('format') is not None:
+            self.format = m.get('format')
+        if m.get('interval') is not None:
+            self.interval = m.get('interval')
+        if m.get('pattern') is not None:
+            self.pattern = m.get('pattern')
+        if m.get('prefix') is not None:
+            self.prefix = m.get('prefix')
+        if m.get('startTime') is not None:
+            self.start_time = m.get('startTime')
+        if m.get('tagPackId') is not None:
+            self.tag_pack_id = m.get('tagPackId')
+        if m.get('timeField') is not None:
+            self.time_field = m.get('timeField')
+        if m.get('timeFormat') is not None:
+            self.time_format = m.get('timeFormat')
+        if m.get('timePattern') is not None:
+            self.time_pattern = m.get('timePattern')
+        if m.get('timeZone') is not None:
+            self.time_zone = m.get('timeZone')
+        if m.get('useAwsSQSOnly') is not None:
+            self.use_aws_sqsonly = m.get('useAwsSQSOnly')
         return self
 
 
-class
+class S3Ingestion(TeaModel):
     def __init__(
         self,
-
-
-
-
-
-
-
-
-
-        parameters: Dict[str, Any] = None,
-        resource_pool: str = None,
-        role_arn: str = None,
-        script: str = None,
-        source_logstore: str = None,
-        sql_type: str = None,
-        to_time: int = None,
-        to_time_expr: str = None,
+        configuration: S3IngestionConfigurationSource = None,
+        create_time: int = None,
+        description: str = None,
+        display_name: str = None,
+        last_modified_time: int = None,
+        name: str = None,
+        schedule: Schedule = None,
+        schedule_id: str = None,
+        status: str = None,
     ):
         # This parameter is required.
-        self.
-
-        self.
+        self.configuration = configuration
+        self.create_time = create_time
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        self.last_modified_time = last_modified_time
+        # This parameter is required.
+        self.name = name
+        # This parameter is required.
+        self.schedule = schedule
+        self.schedule_id = schedule_id
+        self.status = status
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.create_time is not None:
+            result['createTime'] = self.create_time
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.last_modified_time is not None:
+            result['lastModifiedTime'] = self.last_modified_time
+        if self.name is not None:
+            result['name'] = self.name
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        if self.schedule_id is not None:
+            result['scheduleId'] = self.schedule_id
+        if self.status is not None:
+            result['status'] = self.status
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = S3IngestionConfigurationSource()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('createTime') is not None:
+            self.create_time = m.get('createTime')
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('lastModifiedTime') is not None:
+            self.last_modified_time = m.get('lastModifiedTime')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        if m.get('scheduleId') is not None:
+            self.schedule_id = m.get('scheduleId')
+        if m.get('status') is not None:
+            self.status = m.get('status')
+        return self
+
+
+class S3IngestionConfiguration(TeaModel):
+    def __init__(
+        self,
+        logstore: str = None,
+        source: S3IngestionConfigurationSource = None,
+    ):
+        self.logstore = logstore
+        self.source = source
+
+    def validate(self):
+        if self.source:
+            self.source.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.logstore is not None:
+            result['logstore'] = self.logstore
+        if self.source is not None:
+            result['source'] = self.source.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('logstore') is not None:
+            self.logstore = m.get('logstore')
+        if m.get('source') is not None:
+            temp_model = S3IngestionConfigurationSource()
+            self.source = temp_model.from_map(m['source'])
+        return self
+
+
+class SavedSearch(TeaModel):
+    def __init__(
+        self,
+        display_name: str = None,
+        logstore: str = None,
+        savedsearch_name: str = None,
+        search_query: str = None,
+        topic: str = None,
+    ):
+        # This parameter is required.
+        self.display_name = display_name
+        # This parameter is required.
+        self.logstore = logstore
+        # This parameter is required.
+        self.savedsearch_name = savedsearch_name
+        # This parameter is required.
+        self.search_query = search_query
+        self.topic = topic
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.logstore is not None:
+            result['logstore'] = self.logstore
+        if self.savedsearch_name is not None:
+            result['savedsearchName'] = self.savedsearch_name
+        if self.search_query is not None:
+            result['searchQuery'] = self.search_query
+        if self.topic is not None:
+            result['topic'] = self.topic
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('logstore') is not None:
+            self.logstore = m.get('logstore')
+        if m.get('savedsearchName') is not None:
+            self.savedsearch_name = m.get('savedsearchName')
+        if m.get('searchQuery') is not None:
+            self.search_query = m.get('searchQuery')
+        if m.get('topic') is not None:
+            self.topic = m.get('topic')
+        return self
+
+
+class ScheduledSQLConfiguration(TeaModel):
+    def __init__(
+        self,
+        data_format: str = None,
+        dest_endpoint: str = None,
+        dest_logstore: str = None,
+        dest_project: str = None,
+        dest_role_arn: str = None,
+        from_time: int = None,
+        from_time_expr: str = None,
+        max_retries: int = None,
+        max_run_time_in_seconds: int = None,
+        parameters: Dict[str, Any] = None,
+        resource_pool: str = None,
+        role_arn: str = None,
+        script: str = None,
+        source_logstore: str = None,
+        sql_type: str = None,
+        to_time: int = None,
+        to_time_expr: str = None,
+    ):
+        # This parameter is required.
+        self.data_format = data_format
+        # This parameter is required.
+        self.dest_endpoint = dest_endpoint
         # This parameter is required.
         self.dest_logstore = dest_logstore
         # This parameter is required.
@@ -4393,9 +4848,11 @@ class Project(TeaModel):
         data_redundancy_type: str = None,
         description: str = None,
         last_modify_time: str = None,
+        location: str = None,
         owner: str = None,
         project_name: str = None,
         quota: Dict[str, Any] = None,
+        recycle_bin_enabled: bool = None,
         region: str = None,
         resource_group_id: str = None,
         status: str = None,
@@ -4405,10 +4862,12 @@ class Project(TeaModel):
         # This parameter is required.
         self.description = description
         self.last_modify_time = last_modify_time
+        self.location = location
         self.owner = owner
         # This parameter is required.
         self.project_name = project_name
         self.quota = quota
+        self.recycle_bin_enabled = recycle_bin_enabled
         self.region = region
         self.resource_group_id = resource_group_id
         self.status = status
@@ -4430,12 +4889,16 @@ class Project(TeaModel):
             result['description'] = self.description
         if self.last_modify_time is not None:
             result['lastModifyTime'] = self.last_modify_time
+        if self.location is not None:
+            result['location'] = self.location
         if self.owner is not None:
             result['owner'] = self.owner
         if self.project_name is not None:
             result['projectName'] = self.project_name
         if self.quota is not None:
             result['quota'] = self.quota
+        if self.recycle_bin_enabled is not None:
+            result['recycleBinEnabled'] = self.recycle_bin_enabled
         if self.region is not None:
             result['region'] = self.region
         if self.resource_group_id is not None:
@@ -4454,12 +4917,16 @@ class Project(TeaModel):
             self.description = m.get('description')
         if m.get('lastModifyTime') is not None:
             self.last_modify_time = m.get('lastModifyTime')
+        if m.get('location') is not None:
+            self.location = m.get('location')
         if m.get('owner') is not None:
             self.owner = m.get('owner')
         if m.get('projectName') is not None:
             self.project_name = m.get('projectName')
         if m.get('quota') is not None:
             self.quota = m.get('quota')
+        if m.get('recycleBinEnabled') is not None:
+            self.recycle_bin_enabled = m.get('recycleBinEnabled')
         if m.get('region') is not None:
             self.region = m.get('region')
         if m.get('resourceGroupId') is not None:
@@ -4586,6 +5053,85 @@ class ApplyConfigToMachineGroupResponse(TeaModel):
         return self
 
 
+class CallAiToolsRequest(TeaModel):
+    def __init__(
+        self,
+        params: Dict[str, str] = None,
+        region_id: str = None,
+        tool_name: str = None,
+    ):
+        self.params = params
+        self.region_id = region_id
+        # This parameter is required.
+        self.tool_name = tool_name
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.params is not None:
+            result['params'] = self.params
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        if self.tool_name is not None:
+            result['toolName'] = self.tool_name
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('params') is not None:
+            self.params = m.get('params')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        if m.get('toolName') is not None:
+            self.tool_name = m.get('toolName')
+        return self
+
+
+class CallAiToolsResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: str = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            self.body = m.get('body')
+        return self
+
+
 class ChangeResourceGroupRequest(TeaModel):
     def __init__(
         self,
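The hunk above introduces CallAiToolsRequest and CallAiToolsResponse. A small sketch of building the request model follows, assuming the generated serialization behaves like the other models in this diff; the tool name, region, and params below are invented placeholders, not values defined by the package.

```python
# Hypothetical request construction; 'sls_overview' and the params are placeholders.
from alibabacloud_sls20201230 import models as sls_models

req = sls_models.CallAiToolsRequest(
    tool_name='sls_overview',            # the only field the generated model marks as required
    region_id='cn-hangzhou',
    params={'project': 'my-project'},    # free-form string map per the model definition
)

wire = req.to_map()                      # camelCase keys: 'params', 'regionId', 'toolName'
assert wire['toolName'] == 'sls_overview'
```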
@@ -5859,7 +6405,7 @@ class CreateLogStoreRequest(TeaModel):
         # * The name must be unique in a project.
         # * The name can contain only lowercase letters, digits, hyphens (-), and underscores (_).
         # * The name must start and end with a lowercase letter or a digit.
-        # * The name must be
+        # * The name must be 2 to 63 characters in length.
         #
         # This parameter is required.
         self.logstore_name = logstore_name
@@ -5872,6 +6418,7 @@ class CreateLogStoreRequest(TeaModel):
         # * **standard**: Standard Logstore. This type of Logstore supports the log analysis feature and is suitable for scenarios such as real-time monitoring and interactive analysis. You can also use this type of Logstore to build a comprehensive observability system.
         # * **query**: Query Logstore. This type of Logstore supports high-performance queries. The index traffic fee of a Query Logstore is approximately half that of a Standard Logstore. Query Logstores do not support SQL analysis. Query Logstores are suitable for scenarios in which the amount of data is large, the log retention period is long, or log analysis is not required. If logs are stored for weeks or months, the log retention period is considered long.
         self.mode = mode
+        # IngestProcessor ID
         self.processor_id = processor_id
         # The number of shards.
         #
@@ -6132,24 +6679,51 @@ class CreateLogtailPipelineConfigRequest(TeaModel):
         processors: List[Dict[str, Any]] = None,
     ):
         # The aggregation plug-ins.
+        #
+        # > This parameter takes effect only when extended plug-ins are used. You can use only one aggregation plug-in.
         self.aggregators = aggregators
         # The name of the configuration.
         #
+        # > The name of the configuration must be unique in the project to which the configuration belongs. After the configuration is created, you cannot change the name of the configuration. The name must meet the following requirements:
+        #
+        # * The name can contain only lowercase letters, digits, hyphens (-), and underscores (_).
+        #
+        # * The name must start and end with a lowercase letter or a digit.
+        #
+        # * The name must be 2 to 128 characters in length.
+        #
         # This parameter is required.
         self.config_name = config_name
-        # The
+        # The output plug-ins.
+        #
+        # > You can configure only one output plug-in.
         #
         # This parameter is required.
         self.flushers = flushers
-        # The global
+        # The global settings.
         self.global_ = global_
-        # The
+        # The input plug-ins.
+        #
+        # > You can configure only one input plug-in.
         #
         # This parameter is required.
         self.inputs = inputs
-        # The sample log.
+        # The sample log. You can specify multiple sample logs.
         self.log_sample = log_sample
         # The processing plug-ins.
+        #
+        # > Logtail plug-ins for data processing are classified into native plug-ins and extended plug-ins. For more information, see [Overview of Logtail plug-ins for data processing](https://help.aliyun.com/document_detail/64957.html).
+        #
+        # >
+        #
+        # * You can use native plug-ins only to collect text logs.
+        #
+        # * You cannot add native plug-ins and extended plug-ins at a time.
+        #
+        # * When you add native plug-ins, take note of the following items:
+        #
+        # * You must add one of the following Logtail plug-ins for data processing as the first plug-in: Data Parsing (Regex Mode), Data Parsing (Delimiter Mode), Data Parsing (JSON Mode), Data Parsing (NGINX Mode), Data Parsing (Apache Mode), and Data Parsing (IIS Mode).
+        # * After you add the first plug-in, you can add one Time Parsing plug-in, one Data Filtering plug-in, and multiple Data Masking plug-ins.
         self.processors = processors
 
     def validate(self):
@@ -6372,10 +6946,102 @@ class CreateMachineGroupResponse(TeaModel):
         return self
 
 
+class CreateMaxComputeExportRequest(TeaModel):
+    def __init__(
+        self,
+        configuration: MaxComputeExportConfiguration = None,
+        description: str = None,
+        display_name: str = None,
+        name: str = None,
+    ):
+        # The setting of the MaxCompute data shipping job.
+        #
+        # This parameter is required.
+        self.configuration = configuration
+        # The description of the MaxCompute data shipping job.
+        self.description = description
+        # The display name of the MaxCompute data shipping job.
+        #
+        # This parameter is required.
+        self.display_name = display_name
+        # The unique identifier of the MaxCompute data shipping job.
+        #
+        # This parameter is required.
+        self.name = name
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.name is not None:
+            result['name'] = self.name
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = MaxComputeExportConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        return self
+
+
+class CreateMaxComputeExportResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class CreateMetricStoreRequest(TeaModel):
     def __init__(
         self,
         auto_split: bool = None,
+        hot_ttl: int = None,
+        infrequent_access_ttl: int = None,
         max_split_shard: int = None,
         metric_type: str = None,
         mode: str = None,
@@ -6385,6 +7051,8 @@ class CreateMetricStoreRequest(TeaModel):
     ):
         # Specifies whether to enable automatic sharding.
         self.auto_split = auto_split
+        self.hot_ttl = hot_ttl
+        self.infrequent_access_ttl = infrequent_access_ttl
         # The maximum number of shards into which existing shards can be automatically split. This parameter is valid only when you set the autoSplit parameter to true.
         self.max_split_shard = max_split_shard
         # The type of the metric data. Example: prometheus.
@@ -6415,6 +7083,10 @@ class CreateMetricStoreRequest(TeaModel):
         result = dict()
         if self.auto_split is not None:
             result['autoSplit'] = self.auto_split
+        if self.hot_ttl is not None:
+            result['hot_ttl'] = self.hot_ttl
+        if self.infrequent_access_ttl is not None:
+            result['infrequentAccessTTL'] = self.infrequent_access_ttl
         if self.max_split_shard is not None:
             result['maxSplitShard'] = self.max_split_shard
         if self.metric_type is not None:
@@ -6433,6 +7105,10 @@ class CreateMetricStoreRequest(TeaModel):
         m = m or dict()
         if m.get('autoSplit') is not None:
             self.auto_split = m.get('autoSplit')
+        if m.get('hot_ttl') is not None:
+            self.hot_ttl = m.get('hot_ttl')
+        if m.get('infrequentAccessTTL') is not None:
+            self.infrequent_access_ttl = m.get('infrequentAccessTTL')
         if m.get('maxSplitShard') is not None:
             self.max_split_shard = m.get('maxSplitShard')
         if m.get('metricType') is not None:
@@ -6670,13 +7346,21 @@ class CreateOSSIngestionRequest(TeaModel):
         name: str = None,
         schedule: Schedule = None,
     ):
+        # The configurations of the OSS data import job.
+        #
        # This parameter is required.
         self.configuration = configuration
+        # The description of the job.
         self.description = description
+        # The display name.
+        #
         # This parameter is required.
         self.display_name = display_name
+        # The name of the OSS data import job.
+        #
         # This parameter is required.
         self.name = name
+        # The scheduling type. By default, you do not need to specify this parameter. If you want to import data at regular intervals, such as importing data every Monday at 08: 00., you can specify a cron expression.
         self.schedule = schedule
 
     def validate(self):
@@ -6964,23 +7648,34 @@ class CreateProjectRequest(TeaModel):
         data_redundancy_type: str = None,
         description: str = None,
         project_name: str = None,
+        recycle_bin_enabled: bool = None,
         resource_group_id: str = None,
     ):
-        #
+        # The disaster recovery type. Valid values:
+        #
+        # * LRS: locally redundant storage
+        # * ZRS: zone-redundant storage
         self.data_redundancy_type = data_redundancy_type
         # The description of the project.
         #
         # This parameter is required.
         self.description = description
-        # The
+        # The project name must be unique in a region. You cannot change the name after you create the project. The name must meet the following requirements:
         #
-        # * The name must be unique.
-        # *
-        # *
-        # *
+        # * The name must be globally unique.
+        # * The name can contain only lowercase letters, digits, and hyphens (-).
+        # * The name must start and end with a lowercase letter or a digit.
+        # * The name must be 3 to 63 characters in length.
         #
         # This parameter is required.
         self.project_name = project_name
+        # Specifies whether to enable the recycle bin feature.
+        #
+        # Valid values:
+        #
+        # * true
+        # * false
+        self.recycle_bin_enabled = recycle_bin_enabled
         # The ID of the resource group.
         self.resource_group_id = resource_group_id
 
@@ -6999,6 +7694,8 @@ class CreateProjectRequest(TeaModel):
             result['description'] = self.description
         if self.project_name is not None:
             result['projectName'] = self.project_name
+        if self.recycle_bin_enabled is not None:
+            result['recycleBinEnabled'] = self.recycle_bin_enabled
         if self.resource_group_id is not None:
             result['resourceGroupId'] = self.resource_group_id
         return result
@@ -7011,6 +7708,8 @@ class CreateProjectRequest(TeaModel):
             self.description = m.get('description')
         if m.get('projectName') is not None:
             self.project_name = m.get('projectName')
+        if m.get('recycleBinEnabled') is not None:
+            self.recycle_bin_enabled = m.get('recycleBinEnabled')
         if m.get('resourceGroupId') is not None:
             self.resource_group_id = m.get('resourceGroupId')
         return self
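The CreateProjectRequest hunks above add the recycleBinEnabled flag and spell out the project-name rules. Here is a minimal sketch of how the new field would be populated and serialized, assuming the generated to_map() behaves as shown in the diff; the project name and description are placeholders.

```python
# Hypothetical values; only the field and wire-key names come from the diff above.
from alibabacloud_sls20201230 import models as sls_models

req = sls_models.CreateProjectRequest(
    project_name='my-demo-project',      # 3 to 63 chars, lowercase letters, digits, hyphens
    description='demo project',
    data_redundancy_type='ZRS',          # LRS or ZRS per the new doc comment
    recycle_bin_enabled=True,            # new in 5.7.0
)

wire = req.to_map()
assert wire['recycleBinEnabled'] is True
assert wire['projectName'] == 'my-demo-project'
```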
@@ -7591,8 +8290,12 @@ class CreateTicketRequest(TeaModel):
|
|
|
7591
8290
|
access_token_expiration_time: int = None,
|
|
7592
8291
|
expiration_time: int = None,
|
|
7593
8292
|
):
|
|
8293
|
+
# * The validity period of the access token. Unit: seconds. Default value: 86400, which specifies one day. Valid values: 0 to 86400.
|
|
8294
|
+
# * The validity period of the access token is the smaller value between accessTokenExpirationTime and expirationTime.
|
|
8295
|
+
# * If you use a Security Token Service (STS) token to call this operation, the validity period of the access token is the smallest value among accessTokenExpirationTime, expirationTime, and the validity period of the STS token.
|
|
7594
8296
|
self.access_token_expiration_time = access_token_expiration_time
|
|
7595
|
-
#
|
|
8297
|
+
# * You must use the Simple Log Service endpoint for the China (Shanghai) or Singapore region to call the CreateTicket operation. After you obtain the ticket, you can use the ticket regardless of the region.
|
|
8298
|
+
# * The validity period for the URL of the console page that you want to embed. Unit: seconds. Default value: 86400, which specifies one day. Valid values: 0 to 2592000. The value 2592000 specifies 30 days.
|
|
7596
8299
|
self.expiration_time = expiration_time
|
|
7597
8300
|
|
|
7598
8301
|
def validate(self):
|
|
@@ -8210,6 +8913,39 @@ class DeleteIndexResponse(TeaModel):
|
|
|
8210
8913
|
return self
|
|
8211
8914
|
|
|
8212
8915
|
|
|
8916
|
+
class DeleteIngestProcessorResponse(TeaModel):
|
|
8917
|
+
def __init__(
|
|
8918
|
+
self,
|
|
8919
|
+
headers: Dict[str, str] = None,
|
|
8920
|
+
status_code: int = None,
|
|
8921
|
+
):
|
|
8922
|
+
self.headers = headers
|
|
8923
|
+
self.status_code = status_code
|
|
8924
|
+
|
|
8925
|
+
def validate(self):
|
|
8926
|
+
pass
|
|
8927
|
+
|
|
8928
|
+
def to_map(self):
|
|
8929
|
+
_map = super().to_map()
|
|
8930
|
+
if _map is not None:
|
|
8931
|
+
return _map
|
|
8932
|
+
|
|
8933
|
+
result = dict()
|
|
8934
|
+
if self.headers is not None:
|
|
8935
|
+
result['headers'] = self.headers
|
|
8936
|
+
if self.status_code is not None:
|
|
8937
|
+
result['statusCode'] = self.status_code
|
|
8938
|
+
return result
|
|
8939
|
+
|
|
8940
|
+
def from_map(self, m: dict = None):
|
|
8941
|
+
m = m or dict()
|
|
8942
|
+
if m.get('headers') is not None:
|
|
8943
|
+
self.headers = m.get('headers')
|
|
8944
|
+
if m.get('statusCode') is not None:
|
|
8945
|
+
self.status_code = m.get('statusCode')
|
|
8946
|
+
return self
|
|
8947
|
+
|
|
8948
|
+
|
|
8213
8949
|
class DeleteLogStoreResponse(TeaModel):
|
|
8214
8950
|
def __init__(
|
|
8215
8951
|
self,
|
|
@@ -8342,6 +9078,39 @@ class DeleteMachineGroupResponse(TeaModel):
|
|
|
8342
9078
|
return self
|
|
8343
9079
|
|
|
8344
9080
|
|
|
9081
|
+
class DeleteMaxComputeExportResponse(TeaModel):
|
|
9082
|
+
def __init__(
|
|
9083
|
+
self,
|
|
9084
|
+
headers: Dict[str, str] = None,
|
|
9085
|
+
status_code: int = None,
|
|
9086
|
+
):
|
|
9087
|
+
self.headers = headers
|
|
9088
|
+
self.status_code = status_code
|
|
9089
|
+
|
|
9090
|
+
def validate(self):
|
|
9091
|
+
pass
|
|
9092
|
+
|
|
9093
|
+
def to_map(self):
|
|
9094
|
+
_map = super().to_map()
|
|
9095
|
+
if _map is not None:
|
|
9096
|
+
return _map
|
|
9097
|
+
|
|
9098
|
+
result = dict()
|
|
9099
|
+
if self.headers is not None:
|
|
9100
|
+
result['headers'] = self.headers
|
|
9101
|
+
if self.status_code is not None:
|
|
9102
|
+
result['statusCode'] = self.status_code
|
|
9103
|
+
return result
|
|
9104
|
+
|
|
9105
|
+
def from_map(self, m: dict = None):
|
|
9106
|
+
m = m or dict()
|
|
9107
|
+
if m.get('headers') is not None:
|
|
9108
|
+
self.headers = m.get('headers')
|
|
9109
|
+
if m.get('statusCode') is not None:
|
|
9110
|
+
self.status_code = m.get('statusCode')
|
|
9111
|
+
return self
|
|
9112
|
+
|
|
9113
|
+
|
|
8345
9114
|
class DeleteMetricStoreResponse(TeaModel):
|
|
8346
9115
|
def __init__(
|
|
8347
9116
|
self,
|
|
@@ -8474,6 +9243,33 @@ class DeleteOSSIngestionResponse(TeaModel):
|
|
|
8474
9243
|
return self
|
|
8475
9244
|
|
|
8476
9245
|
|
|
9246
|
+
class DeleteProjectRequest(TeaModel):
|
|
9247
|
+
def __init__(
|
|
9248
|
+
self,
|
|
9249
|
+
force_delete: bool = None,
|
|
9250
|
+
):
|
|
9251
|
+
self.force_delete = force_delete
|
|
9252
|
+
|
|
9253
|
+
def validate(self):
|
|
9254
|
+
pass
|
|
9255
|
+
|
|
9256
|
+
def to_map(self):
|
|
9257
|
+
_map = super().to_map()
|
|
9258
|
+
if _map is not None:
|
|
9259
|
+
return _map
|
|
9260
|
+
|
|
9261
|
+
result = dict()
|
|
9262
|
+
if self.force_delete is not None:
|
|
9263
|
+
result['forceDelete'] = self.force_delete
|
|
9264
|
+
return result
|
|
9265
|
+
|
|
9266
|
+
def from_map(self, m: dict = None):
|
|
9267
|
+
m = m or dict()
|
|
9268
|
+
if m.get('forceDelete') is not None:
|
|
9269
|
+
self.force_delete = m.get('forceDelete')
|
|
9270
|
+
return self
|
|
9271
|
+
|
|
9272
|
+
|
|
8477
9273
|
class DeleteProjectResponse(TeaModel):
|
|
8478
9274
|
def __init__(
|
|
8479
9275
|
self,
|
|
@@ -8644,6 +9440,13 @@ class DescribeRegionsRequest(TeaModel):
|
|
|
8644
9440
|
self,
|
|
8645
9441
|
language: str = None,
|
|
8646
9442
|
):
|
|
9443
|
+
# The language of the localName parameter that is returned.
|
|
9444
|
+
#
|
|
9445
|
+
# Valid values:
|
|
9446
|
+
#
|
|
9447
|
+
# * ja
|
|
9448
|
+
# * en
|
|
9449
|
+
# * zh
|
|
8647
9450
|
self.language = language
|
|
8648
9451
|
|
|
8649
9452
|
def validate(self):
|
|
@@ -8674,9 +9477,13 @@ class DescribeRegionsResponseBodyRegions(TeaModel):
|
|
|
8674
9477
|
local_name: str = None,
|
|
8675
9478
|
region: str = None,
|
|
8676
9479
|
):
|
|
9480
|
+
# The public endpoint of Simple Log Service.
|
|
8677
9481
|
self.internet_endpoint = internet_endpoint
|
|
9482
|
+
# The internal endpoint of Simple Log Service.
|
|
8678
9483
|
self.intranet_endpoint = intranet_endpoint
|
|
9484
|
+
# The name of the Simple Log Service region.
|
|
8679
9485
|
self.local_name = local_name
|
|
9486
|
+
# SLS region
|
|
8680
9487
|
self.region = region
|
|
8681
9488
|
|
|
8682
9489
|
def validate(self):
|
|
@@ -10484,7 +11291,7 @@ class GetDownloadJobResponseBody(TeaModel):
|
|
|
10484
11291
|
self.execution_details = execution_details
|
|
10485
11292
|
# 代表资源名称的资源属性字段
|
|
10486
11293
|
self.name = name
|
|
10487
|
-
#
|
|
11294
|
+
# The status of the log download task.
|
|
10488
11295
|
self.status = status
|
|
10489
11296
|
|
|
10490
11297
|
def validate(self):
|
|
@@ -11025,6 +11832,47 @@ class GetIndexResponse(TeaModel):
|
|
|
11025
11832
|
return self
|
|
11026
11833
|
|
|
11027
11834
|
|
|
11835
|
+
class GetIngestProcessorResponse(TeaModel):
|
|
11836
|
+
def __init__(
|
|
11837
|
+
self,
|
|
11838
|
+
headers: Dict[str, str] = None,
|
|
11839
|
+
status_code: int = None,
|
|
11840
|
+
body: IngestProcessor = None,
|
|
11841
|
+
):
|
|
11842
|
+
self.headers = headers
|
|
11843
|
+
self.status_code = status_code
|
|
11844
|
+
self.body = body
|
|
11845
|
+
|
|
11846
|
+
def validate(self):
|
|
11847
|
+
if self.body:
|
|
11848
|
+
self.body.validate()
|
|
11849
|
+
|
|
11850
|
+
def to_map(self):
|
|
11851
|
+
_map = super().to_map()
|
|
11852
|
+
if _map is not None:
|
|
11853
|
+
return _map
|
|
11854
|
+
|
|
11855
|
+
result = dict()
|
|
11856
|
+
if self.headers is not None:
|
|
11857
|
+
result['headers'] = self.headers
|
|
11858
|
+
if self.status_code is not None:
|
|
11859
|
+
result['statusCode'] = self.status_code
|
|
11860
|
+
if self.body is not None:
|
|
11861
|
+
result['body'] = self.body.to_map()
|
|
11862
|
+
return result
|
|
11863
|
+
|
|
11864
|
+
def from_map(self, m: dict = None):
|
|
11865
|
+
m = m or dict()
|
|
11866
|
+
if m.get('headers') is not None:
|
|
11867
|
+
self.headers = m.get('headers')
|
|
11868
|
+
if m.get('statusCode') is not None:
|
|
11869
|
+
self.status_code = m.get('statusCode')
|
|
11870
|
+
if m.get('body') is not None:
|
|
11871
|
+
temp_model = IngestProcessor()
|
|
11872
|
+
self.body = temp_model.from_map(m['body'])
|
|
11873
|
+
return self
|
|
11874
|
+
|
|
11875
|
+
|
|
11028
11876
|
class GetLogStoreResponse(TeaModel):
|
|
11029
11877
|
def __init__(
|
|
11030
11878
|
self,
|
|
@@ -11829,6 +12677,7 @@ class GetMLServiceResultsRequest(TeaModel):
|
|
|
11829
12677
|
):
|
|
11830
12678
|
self.allow_builtin = allow_builtin
|
|
11831
12679
|
self.body = body
|
|
12680
|
+
# The version of the algorithm. The algorithm varies based on the version.
|
|
11832
12681
|
self.version = version
|
|
11833
12682
|
|
|
11834
12683
|
def validate(self):
|
|
@@ -11976,11 +12825,54 @@ class GetMachineGroupResponse(TeaModel):
|
|
|
11976
12825
|
return self
|
|
11977
12826
|
|
|
11978
12827
|
|
|
11979
|
-
class
|
|
12828
|
+
class GetMaxComputeExportResponse(TeaModel):
|
|
11980
12829
|
def __init__(
|
|
11981
12830
|
self,
|
|
11982
|
-
|
|
12831
|
+
headers: Dict[str, str] = None,
|
|
12832
|
+
status_code: int = None,
|
|
12833
|
+
body: MaxComputeExport = None,
|
|
12834
|
+
):
|
|
12835
|
+
self.headers = headers
|
|
12836
|
+
self.status_code = status_code
|
|
12837
|
+
self.body = body
|
|
12838
|
+
|
|
12839
|
+
def validate(self):
|
|
12840
|
+
if self.body:
|
|
12841
|
+
self.body.validate()
|
|
12842
|
+
|
|
12843
|
+
def to_map(self):
|
|
12844
|
+
_map = super().to_map()
|
|
12845
|
+
if _map is not None:
|
|
12846
|
+
return _map
|
|
12847
|
+
|
|
12848
|
+
result = dict()
|
|
12849
|
+
if self.headers is not None:
|
|
12850
|
+
result['headers'] = self.headers
|
|
12851
|
+
if self.status_code is not None:
|
|
12852
|
+
result['statusCode'] = self.status_code
|
|
12853
|
+
if self.body is not None:
|
|
12854
|
+
result['body'] = self.body.to_map()
|
|
12855
|
+
return result
|
|
12856
|
+
|
|
12857
|
+
def from_map(self, m: dict = None):
|
|
12858
|
+
m = m or dict()
|
|
12859
|
+
if m.get('headers') is not None:
|
|
12860
|
+
self.headers = m.get('headers')
|
|
12861
|
+
if m.get('statusCode') is not None:
|
|
12862
|
+
self.status_code = m.get('statusCode')
|
|
12863
|
+
if m.get('body') is not None:
|
|
12864
|
+
temp_model = MaxComputeExport()
|
|
12865
|
+
self.body = temp_model.from_map(m['body'])
|
|
12866
|
+
return self
|
|
12867
|
+
|
|
12868
|
+
|
|
12869
|
+
class GetMetricStoreResponseBody(TeaModel):
|
|
12870
|
+
def __init__(
|
|
12871
|
+
self,
|
|
12872
|
+
auto_split: bool = None,
|
|
11983
12873
|
create_time: int = None,
|
|
12874
|
+
hot_ttl: int = None,
|
|
12875
|
+
infrequent_access_ttl: int = None,
|
|
11984
12876
|
last_modify_time: int = None,
|
|
11985
12877
|
max_split_shard: int = None,
|
|
11986
12878
|
metric_type: str = None,
|
|
@@ -11993,6 +12885,8 @@ class GetMetricStoreResponseBody(TeaModel):
|
|
|
11993
12885
|
self.auto_split = auto_split
|
|
11994
12886
|
# The creation time. The value is a UNIX timestamp.
|
|
11995
12887
|
self.create_time = create_time
|
|
12888
|
+
self.hot_ttl = hot_ttl
|
|
12889
|
+
self.infrequent_access_ttl = infrequent_access_ttl
|
|
11996
12890
|
# The last update time. The value is a UNIX timestamp.
|
|
11997
12891
|
self.last_modify_time = last_modify_time
|
|
11998
12892
|
# The maximum number of shards into which existing shards can be automatically split.
|
|
@@ -12021,6 +12915,10 @@ class GetMetricStoreResponseBody(TeaModel):
|
|
|
12021
12915
|
result['autoSplit'] = self.auto_split
|
|
12022
12916
|
if self.create_time is not None:
|
|
12023
12917
|
result['createTime'] = self.create_time
|
|
12918
|
+
if self.hot_ttl is not None:
|
|
12919
|
+
result['hot_ttl'] = self.hot_ttl
|
|
12920
|
+
if self.infrequent_access_ttl is not None:
|
|
12921
|
+
result['infrequentAccessTTL'] = self.infrequent_access_ttl
|
|
12024
12922
|
if self.last_modify_time is not None:
|
|
12025
12923
|
result['lastModifyTime'] = self.last_modify_time
|
|
12026
12924
|
if self.max_split_shard is not None:
|
|
@@ -12043,6 +12941,10 @@ class GetMetricStoreResponseBody(TeaModel):
|
|
|
12043
12941
|
self.auto_split = m.get('autoSplit')
|
|
12044
12942
|
if m.get('createTime') is not None:
|
|
12045
12943
|
self.create_time = m.get('createTime')
|
|
12944
|
+
if m.get('hot_ttl') is not None:
|
|
12945
|
+
self.hot_ttl = m.get('hot_ttl')
|
|
12946
|
+
if m.get('infrequentAccessTTL') is not None:
|
|
12947
|
+
self.infrequent_access_ttl = m.get('infrequentAccessTTL')
|
|
12046
12948
|
if m.get('lastModifyTime') is not None:
|
|
12047
12949
|
self.last_modify_time = m.get('lastModifyTime')
|
|
12048
12950
|
if m.get('maxSplitShard') is not None:
|
|
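Note that the two fields added to GetMetricStoreResponseBody serialize under differently cased keys, 'hot_ttl' and 'infrequentAccessTTL', exactly as the to_map/from_map branches above show. A short parsing sketch, not part of the diff; the numeric values are placeholders.

```python
from alibabacloud_sls20201230 import models as sls_models

raw = {
    'autoSplit': True,
    'createTime': 1700000000,
    'hot_ttl': 7,                # snake_case key, as in the code above
    'infrequentAccessTTL': 30,   # camelCase key with a TTL suffix
    'lastModifyTime': 1700000100,
    'maxSplitShard': 64,
}
body = sls_models.GetMetricStoreResponseBody().from_map(raw)
assert body.hot_ttl == 7 and body.infrequent_access_ttl == 30
```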
@@ -13011,6 +13913,157 @@ class ListAgentInstanceConfigsResponse(TeaModel):
|
|
|
13011
13913
|
return self
|
|
13012
13914
|
|
|
13013
13915
|
|
|
13916
|
+
class ListAiToolsResponseBodyFields(TeaModel):
|
|
13917
|
+
def __init__(
|
|
13918
|
+
self,
|
|
13919
|
+
name: str = None,
|
|
13920
|
+
option: List[str] = None,
|
|
13921
|
+
required: bool = None,
|
|
13922
|
+
type: str = None,
|
|
13923
|
+
example: str = None,
|
|
13924
|
+
description: str = None,
|
|
13925
|
+
):
|
|
13926
|
+
self.name = name
|
|
13927
|
+
self.option = option
|
|
13928
|
+
self.required = required
|
|
13929
|
+
self.type = type
|
|
13930
|
+
self.example = example
|
|
13931
|
+
self.description = description
|
|
13932
|
+
|
|
13933
|
+
def validate(self):
|
|
13934
|
+
pass
|
|
13935
|
+
|
|
13936
|
+
def to_map(self):
|
|
13937
|
+
_map = super().to_map()
|
|
13938
|
+
if _map is not None:
|
|
13939
|
+
return _map
|
|
13940
|
+
|
|
13941
|
+
result = dict()
|
|
13942
|
+
if self.name is not None:
|
|
13943
|
+
result['name'] = self.name
|
|
13944
|
+
if self.option is not None:
|
|
13945
|
+
result['option'] = self.option
|
|
13946
|
+
if self.required is not None:
|
|
13947
|
+
result['required'] = self.required
|
|
13948
|
+
if self.type is not None:
|
|
13949
|
+
result['type'] = self.type
|
|
13950
|
+
if self.example is not None:
|
|
13951
|
+
result['example'] = self.example
|
|
13952
|
+
if self.description is not None:
|
|
13953
|
+
result['description'] = self.description
|
|
13954
|
+
return result
|
|
13955
|
+
|
|
13956
|
+
def from_map(self, m: dict = None):
|
|
13957
|
+
m = m or dict()
|
|
13958
|
+
if m.get('name') is not None:
|
|
13959
|
+
self.name = m.get('name')
|
|
13960
|
+
if m.get('option') is not None:
|
|
13961
|
+
self.option = m.get('option')
|
|
13962
|
+
if m.get('required') is not None:
|
|
13963
|
+
self.required = m.get('required')
|
|
13964
|
+
if m.get('type') is not None:
|
|
13965
|
+
self.type = m.get('type')
|
|
13966
|
+
if m.get('example') is not None:
|
|
13967
|
+
self.example = m.get('example')
|
|
13968
|
+
if m.get('description') is not None:
|
|
13969
|
+
self.description = m.get('description')
|
|
13970
|
+
return self
|
|
13971
|
+
|
|
13972
|
+
|
|
13973
|
+
class ListAiToolsResponseBody(TeaModel):
|
|
13974
|
+
def __init__(
|
|
13975
|
+
self,
|
|
13976
|
+
fields: List[ListAiToolsResponseBodyFields] = None,
|
|
13977
|
+
name: str = None,
|
|
13978
|
+
description: str = None,
|
|
13979
|
+
):
|
|
13980
|
+
self.fields = fields
|
|
13981
|
+
self.name = name
|
|
13982
|
+
self.description = description
|
|
13983
|
+
|
|
13984
|
+
def validate(self):
|
|
13985
|
+
if self.fields:
|
|
13986
|
+
for k in self.fields:
|
|
13987
|
+
if k:
|
|
13988
|
+
k.validate()
|
|
13989
|
+
|
|
13990
|
+
def to_map(self):
|
|
13991
|
+
_map = super().to_map()
|
|
13992
|
+
if _map is not None:
|
|
13993
|
+
return _map
|
|
13994
|
+
|
|
13995
|
+
result = dict()
|
|
13996
|
+
result['fields'] = []
|
|
13997
|
+
if self.fields is not None:
|
|
13998
|
+
for k in self.fields:
|
|
13999
|
+
result['fields'].append(k.to_map() if k else None)
|
|
14000
|
+
if self.name is not None:
|
|
14001
|
+
result['name'] = self.name
|
|
14002
|
+
if self.description is not None:
|
|
14003
|
+
result['description'] = self.description
|
|
14004
|
+
return result
|
|
14005
|
+
|
|
14006
|
+
def from_map(self, m: dict = None):
|
|
14007
|
+
m = m or dict()
|
|
14008
|
+
self.fields = []
|
|
14009
|
+
if m.get('fields') is not None:
|
|
14010
|
+
for k in m.get('fields'):
|
|
14011
|
+
temp_model = ListAiToolsResponseBodyFields()
|
|
14012
|
+
self.fields.append(temp_model.from_map(k))
|
|
14013
|
+
if m.get('name') is not None:
|
|
14014
|
+
self.name = m.get('name')
|
|
14015
|
+
if m.get('description') is not None:
|
|
14016
|
+
self.description = m.get('description')
|
|
14017
|
+
return self
|
|
14018
|
+
|
|
14019
|
+
|
|
14020
|
+
class ListAiToolsResponse(TeaModel):
|
|
14021
|
+
def __init__(
|
|
14022
|
+
self,
|
|
14023
|
+
headers: Dict[str, str] = None,
|
|
14024
|
+
status_code: int = None,
|
|
14025
|
+
body: List[ListAiToolsResponseBody] = None,
|
|
14026
|
+
):
|
|
14027
|
+
self.headers = headers
|
|
14028
|
+
self.status_code = status_code
|
|
14029
|
+
self.body = body
|
|
14030
|
+
|
|
14031
|
+
def validate(self):
|
|
14032
|
+
if self.body:
|
|
14033
|
+
for k in self.body:
|
|
14034
|
+
if k:
|
|
14035
|
+
k.validate()
|
|
14036
|
+
|
|
14037
|
+
def to_map(self):
|
|
14038
|
+
_map = super().to_map()
|
|
14039
|
+
if _map is not None:
|
|
14040
|
+
return _map
|
|
14041
|
+
|
|
14042
|
+
result = dict()
|
|
14043
|
+
if self.headers is not None:
|
|
14044
|
+
result['headers'] = self.headers
|
|
14045
|
+
if self.status_code is not None:
|
|
14046
|
+
result['statusCode'] = self.status_code
|
|
14047
|
+
result['body'] = []
|
|
14048
|
+
if self.body is not None:
|
|
14049
|
+
for k in self.body:
|
|
14050
|
+
result['body'].append(k.to_map() if k else None)
|
|
14051
|
+
return result
|
|
14052
|
+
|
|
14053
|
+
def from_map(self, m: dict = None):
|
|
14054
|
+
m = m or dict()
|
|
14055
|
+
if m.get('headers') is not None:
|
|
14056
|
+
self.headers = m.get('headers')
|
|
14057
|
+
if m.get('statusCode') is not None:
|
|
14058
|
+
self.status_code = m.get('statusCode')
|
|
14059
|
+
self.body = []
|
|
14060
|
+
if m.get('body') is not None:
|
|
14061
|
+
for k in m.get('body'):
|
|
14062
|
+
temp_model = ListAiToolsResponseBody()
|
|
14063
|
+
self.body.append(temp_model.from_map(k))
|
|
14064
|
+
return self
|
|
14065
|
+
|
|
14066
|
+
|
|
13014
14067
|
class ListAlertsRequest(TeaModel):
|
|
13015
14068
|
def __init__(
|
|
13016
14069
|
self,
|
|
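ListAiToolsResponse is unusual in that its body is a plain JSON array of tool descriptors rather than a single object, which is why from_map iterates over the list. A parsing sketch, not part of the diff; the tool and field names are placeholders.

```python
from alibabacloud_sls20201230 import models as sls_models

raw_body = [
    {
        'name': 'demo-tool',
        'description': 'example AI tool',
        'fields': [
            {'name': 'query', 'type': 'string', 'required': True,
             'option': [], 'example': '*', 'description': 'search expression'},
        ],
    },
]
resp = sls_models.ListAiToolsResponse().from_map(
    {'headers': {}, 'statusCode': 200, 'body': raw_body}
)
print(resp.body[0].name, resp.body[0].fields[0].type)  # demo-tool string
```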
@@ -13020,6 +14073,7 @@ class ListAlertsRequest(TeaModel):
|
|
|
13020
14073
|
):
|
|
13021
14074
|
self.logstore = logstore
|
|
13022
14075
|
self.offset = offset
|
|
14076
|
+
# The number of entries per page. Maximum value: 200. Default value: 10.
|
|
13023
14077
|
self.size = size
|
|
13024
14078
|
|
|
13025
14079
|
def validate(self):
|
|
@@ -13514,6 +14568,7 @@ class ListCollectionPoliciesRequest(TeaModel):
|
|
|
13514
14568
|
self.instance_id = instance_id
|
|
13515
14569
|
self.offset = offset
|
|
13516
14570
|
self.policy_name = policy_name
|
|
14571
|
+
# The code of the service.
|
|
13517
14572
|
self.product_code = product_code
|
|
13518
14573
|
self.size = size
|
|
13519
14574
|
|
|
@@ -14727,6 +15782,7 @@ class ListDownloadJobsResponseBodyResults(TeaModel):
|
|
|
14727
15782
|
self.execution_details = execution_details
|
|
14728
15783
|
# The resource attribute field that represents the resource name.
|
|
14729
15784
|
self.name = name
|
|
15785
|
+
# The task status.
|
|
14730
15786
|
self.status = status
|
|
14731
15787
|
|
|
14732
15788
|
def validate(self):
|
|
@@ -14786,6 +15842,7 @@ class ListDownloadJobsResponseBody(TeaModel):
|
|
|
14786
15842
|
total: int = None,
|
|
14787
15843
|
):
|
|
14788
15844
|
self.count = count
|
|
15845
|
+
# The log download tasks.
|
|
14789
15846
|
self.results = results
|
|
14790
15847
|
self.total = total
|
|
14791
15848
|
|
|
@@ -14993,6 +16050,146 @@ class ListETLsResponse(TeaModel):
|
|
|
14993
16050
|
return self
|
|
14994
16051
|
|
|
14995
16052
|
|
|
16053
|
+
class ListIngestProcessorsRequest(TeaModel):
|
|
16054
|
+
def __init__(
|
|
16055
|
+
self,
|
|
16056
|
+
display_name: str = None,
|
|
16057
|
+
offset: int = None,
|
|
16058
|
+
processor_name: str = None,
|
|
16059
|
+
size: int = None,
|
|
16060
|
+
):
|
|
16061
|
+
# The display name of the ingest processor.
|
|
16062
|
+
self.display_name = display_name
|
|
16063
|
+
# The offset. Default value: 0.
|
|
16064
|
+
self.offset = offset
|
|
16065
|
+
# The identifier of the ingest processor.
|
|
16066
|
+
self.processor_name = processor_name
|
|
16067
|
+
# The number of entries to return. Default value: 200.
|
|
16068
|
+
self.size = size
|
|
16069
|
+
|
|
16070
|
+
def validate(self):
|
|
16071
|
+
pass
|
|
16072
|
+
|
|
16073
|
+
def to_map(self):
|
|
16074
|
+
_map = super().to_map()
|
|
16075
|
+
if _map is not None:
|
|
16076
|
+
return _map
|
|
16077
|
+
|
|
16078
|
+
result = dict()
|
|
16079
|
+
if self.display_name is not None:
|
|
16080
|
+
result['displayName'] = self.display_name
|
|
16081
|
+
if self.offset is not None:
|
|
16082
|
+
result['offset'] = self.offset
|
|
16083
|
+
if self.processor_name is not None:
|
|
16084
|
+
result['processorName'] = self.processor_name
|
|
16085
|
+
if self.size is not None:
|
|
16086
|
+
result['size'] = self.size
|
|
16087
|
+
return result
|
|
16088
|
+
|
|
16089
|
+
def from_map(self, m: dict = None):
|
|
16090
|
+
m = m or dict()
|
|
16091
|
+
if m.get('displayName') is not None:
|
|
16092
|
+
self.display_name = m.get('displayName')
|
|
16093
|
+
if m.get('offset') is not None:
|
|
16094
|
+
self.offset = m.get('offset')
|
|
16095
|
+
if m.get('processorName') is not None:
|
|
16096
|
+
self.processor_name = m.get('processorName')
|
|
16097
|
+
if m.get('size') is not None:
|
|
16098
|
+
self.size = m.get('size')
|
|
16099
|
+
return self
|
|
16100
|
+
|
|
16101
|
+
|
|
16102
|
+
class ListIngestProcessorsResponseBody(TeaModel):
|
|
16103
|
+
def __init__(
|
|
16104
|
+
self,
|
|
16105
|
+
count: int = None,
|
|
16106
|
+
processors: List[IngestProcessor] = None,
|
|
16107
|
+
total: int = None,
|
|
16108
|
+
):
|
|
16109
|
+
# The number of entries returned.
|
|
16110
|
+
self.count = count
|
|
16111
|
+
# The ingest processors that are returned.
|
|
16112
|
+
self.processors = processors
|
|
16113
|
+
# The total number of entries returned.
|
|
16114
|
+
self.total = total
|
|
16115
|
+
|
|
16116
|
+
def validate(self):
|
|
16117
|
+
if self.processors:
|
|
16118
|
+
for k in self.processors:
|
|
16119
|
+
if k:
|
|
16120
|
+
k.validate()
|
|
16121
|
+
|
|
16122
|
+
def to_map(self):
|
|
16123
|
+
_map = super().to_map()
|
|
16124
|
+
if _map is not None:
|
|
16125
|
+
return _map
|
|
16126
|
+
|
|
16127
|
+
result = dict()
|
|
16128
|
+
if self.count is not None:
|
|
16129
|
+
result['count'] = self.count
|
|
16130
|
+
result['processors'] = []
|
|
16131
|
+
if self.processors is not None:
|
|
16132
|
+
for k in self.processors:
|
|
16133
|
+
result['processors'].append(k.to_map() if k else None)
|
|
16134
|
+
if self.total is not None:
|
|
16135
|
+
result['total'] = self.total
|
|
16136
|
+
return result
|
|
16137
|
+
|
|
16138
|
+
def from_map(self, m: dict = None):
|
|
16139
|
+
m = m or dict()
|
|
16140
|
+
if m.get('count') is not None:
|
|
16141
|
+
self.count = m.get('count')
|
|
16142
|
+
self.processors = []
|
|
16143
|
+
if m.get('processors') is not None:
|
|
16144
|
+
for k in m.get('processors'):
|
|
16145
|
+
temp_model = IngestProcessor()
|
|
16146
|
+
self.processors.append(temp_model.from_map(k))
|
|
16147
|
+
if m.get('total') is not None:
|
|
16148
|
+
self.total = m.get('total')
|
|
16149
|
+
return self
|
|
16150
|
+
|
|
16151
|
+
|
|
16152
|
+
class ListIngestProcessorsResponse(TeaModel):
|
|
16153
|
+
def __init__(
|
|
16154
|
+
self,
|
|
16155
|
+
headers: Dict[str, str] = None,
|
|
16156
|
+
status_code: int = None,
|
|
16157
|
+
body: ListIngestProcessorsResponseBody = None,
|
|
16158
|
+
):
|
|
16159
|
+
self.headers = headers
|
|
16160
|
+
self.status_code = status_code
|
|
16161
|
+
self.body = body
|
|
16162
|
+
|
|
16163
|
+
def validate(self):
|
|
16164
|
+
if self.body:
|
|
16165
|
+
self.body.validate()
|
|
16166
|
+
|
|
16167
|
+
def to_map(self):
|
|
16168
|
+
_map = super().to_map()
|
|
16169
|
+
if _map is not None:
|
|
16170
|
+
return _map
|
|
16171
|
+
|
|
16172
|
+
result = dict()
|
|
16173
|
+
if self.headers is not None:
|
|
16174
|
+
result['headers'] = self.headers
|
|
16175
|
+
if self.status_code is not None:
|
|
16176
|
+
result['statusCode'] = self.status_code
|
|
16177
|
+
if self.body is not None:
|
|
16178
|
+
result['body'] = self.body.to_map()
|
|
16179
|
+
return result
|
|
16180
|
+
|
|
16181
|
+
def from_map(self, m: dict = None):
|
|
16182
|
+
m = m or dict()
|
|
16183
|
+
if m.get('headers') is not None:
|
|
16184
|
+
self.headers = m.get('headers')
|
|
16185
|
+
if m.get('statusCode') is not None:
|
|
16186
|
+
self.status_code = m.get('statusCode')
|
|
16187
|
+
if m.get('body') is not None:
|
|
16188
|
+
temp_model = ListIngestProcessorsResponseBody()
|
|
16189
|
+
self.body = temp_model.from_map(m['body'])
|
|
16190
|
+
return self
|
|
16191
|
+
|
|
16192
|
+
|
|
14996
16193
|
class ListLogStoresRequest(TeaModel):
|
|
14997
16194
|
def __init__(
|
|
14998
16195
|
self,
|
|
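The new list request/response pair above supports offset/size paging with optional displayName and processorName filters. A construction sketch, not part of the diff; the page size is a placeholder.

```python
from alibabacloud_sls20201230 import models as sls_models

# First page of up to 100 ingest processors; the filters are optional.
req = sls_models.ListIngestProcessorsRequest(offset=0, size=100)
print(req.to_map())  # {'offset': 0, 'size': 100}

# The body that comes back pairs count/total with typed IngestProcessor items.
body = sls_models.ListIngestProcessorsResponseBody().from_map(
    {'count': 0, 'processors': [], 'total': 0}
)
```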
@@ -15011,7 +16208,7 @@ class ListLogStoresRequest(TeaModel):
|
|
|
15011
16208
|
self.mode = mode
|
|
15012
16209
|
# The line from which the query starts. Default value: 0.
|
|
15013
16210
|
self.offset = offset
|
|
15014
|
-
# The number of entries per page. Maximum value: 500. Default value:
|
|
16211
|
+
# The number of entries per page. Maximum value: 500. Default value: 200.
|
|
15015
16212
|
self.size = size
|
|
15016
16213
|
# The type of the data that you want to query. Valid values:
|
|
15017
16214
|
#
|
|
@@ -15210,31 +16407,156 @@ class ListLogtailPipelineConfigResponseBody(TeaModel):
|
|
|
15210
16407
|
return _map
|
|
15211
16408
|
|
|
15212
16409
|
result = dict()
|
|
15213
|
-
if self.configs is not None:
|
|
15214
|
-
result['configs'] = self.configs
|
|
16410
|
+
if self.configs is not None:
|
|
16411
|
+
result['configs'] = self.configs
|
|
16412
|
+
if self.count is not None:
|
|
16413
|
+
result['count'] = self.count
|
|
16414
|
+
if self.total is not None:
|
|
16415
|
+
result['total'] = self.total
|
|
16416
|
+
return result
|
|
16417
|
+
|
|
16418
|
+
def from_map(self, m: dict = None):
|
|
16419
|
+
m = m or dict()
|
|
16420
|
+
if m.get('configs') is not None:
|
|
16421
|
+
self.configs = m.get('configs')
|
|
16422
|
+
if m.get('count') is not None:
|
|
16423
|
+
self.count = m.get('count')
|
|
16424
|
+
if m.get('total') is not None:
|
|
16425
|
+
self.total = m.get('total')
|
|
16426
|
+
return self
|
|
16427
|
+
|
|
16428
|
+
|
|
16429
|
+
class ListLogtailPipelineConfigResponse(TeaModel):
|
|
16430
|
+
def __init__(
|
|
16431
|
+
self,
|
|
16432
|
+
headers: Dict[str, str] = None,
|
|
16433
|
+
status_code: int = None,
|
|
16434
|
+
body: ListLogtailPipelineConfigResponseBody = None,
|
|
16435
|
+
):
|
|
16436
|
+
self.headers = headers
|
|
16437
|
+
self.status_code = status_code
|
|
16438
|
+
self.body = body
|
|
16439
|
+
|
|
16440
|
+
def validate(self):
|
|
16441
|
+
if self.body:
|
|
16442
|
+
self.body.validate()
|
|
16443
|
+
|
|
16444
|
+
def to_map(self):
|
|
16445
|
+
_map = super().to_map()
|
|
16446
|
+
if _map is not None:
|
|
16447
|
+
return _map
|
|
16448
|
+
|
|
16449
|
+
result = dict()
|
|
16450
|
+
if self.headers is not None:
|
|
16451
|
+
result['headers'] = self.headers
|
|
16452
|
+
if self.status_code is not None:
|
|
16453
|
+
result['statusCode'] = self.status_code
|
|
16454
|
+
if self.body is not None:
|
|
16455
|
+
result['body'] = self.body.to_map()
|
|
16456
|
+
return result
|
|
16457
|
+
|
|
16458
|
+
def from_map(self, m: dict = None):
|
|
16459
|
+
m = m or dict()
|
|
16460
|
+
if m.get('headers') is not None:
|
|
16461
|
+
self.headers = m.get('headers')
|
|
16462
|
+
if m.get('statusCode') is not None:
|
|
16463
|
+
self.status_code = m.get('statusCode')
|
|
16464
|
+
if m.get('body') is not None:
|
|
16465
|
+
temp_model = ListLogtailPipelineConfigResponseBody()
|
|
16466
|
+
self.body = temp_model.from_map(m['body'])
|
|
16467
|
+
return self
|
|
16468
|
+
|
|
16469
|
+
|
|
16470
|
+
class ListMachineGroupRequest(TeaModel):
|
|
16471
|
+
def __init__(
|
|
16472
|
+
self,
|
|
16473
|
+
group_name: str = None,
|
|
16474
|
+
offset: int = None,
|
|
16475
|
+
size: int = None,
|
|
16476
|
+
):
|
|
16477
|
+
# The name of the machine group. This parameter is used to filter machine groups. Partial match is supported.
|
|
16478
|
+
self.group_name = group_name
|
|
16479
|
+
# The line from which the query starts. Default value: 0.
|
|
16480
|
+
self.offset = offset
|
|
16481
|
+
# The number of entries per page. Maximum value: 500.
|
|
16482
|
+
self.size = size
|
|
16483
|
+
|
|
16484
|
+
def validate(self):
|
|
16485
|
+
pass
|
|
16486
|
+
|
|
16487
|
+
def to_map(self):
|
|
16488
|
+
_map = super().to_map()
|
|
16489
|
+
if _map is not None:
|
|
16490
|
+
return _map
|
|
16491
|
+
|
|
16492
|
+
result = dict()
|
|
16493
|
+
if self.group_name is not None:
|
|
16494
|
+
result['groupName'] = self.group_name
|
|
16495
|
+
if self.offset is not None:
|
|
16496
|
+
result['offset'] = self.offset
|
|
16497
|
+
if self.size is not None:
|
|
16498
|
+
result['size'] = self.size
|
|
16499
|
+
return result
|
|
16500
|
+
|
|
16501
|
+
def from_map(self, m: dict = None):
|
|
16502
|
+
m = m or dict()
|
|
16503
|
+
if m.get('groupName') is not None:
|
|
16504
|
+
self.group_name = m.get('groupName')
|
|
16505
|
+
if m.get('offset') is not None:
|
|
16506
|
+
self.offset = m.get('offset')
|
|
16507
|
+
if m.get('size') is not None:
|
|
16508
|
+
self.size = m.get('size')
|
|
16509
|
+
return self
|
|
16510
|
+
|
|
16511
|
+
|
|
16512
|
+
class ListMachineGroupResponseBody(TeaModel):
|
|
16513
|
+
def __init__(
|
|
16514
|
+
self,
|
|
16515
|
+
count: int = None,
|
|
16516
|
+
machinegroups: List[str] = None,
|
|
16517
|
+
total: int = None,
|
|
16518
|
+
):
|
|
16519
|
+
# The number of machine groups that are returned on the current page.
|
|
16520
|
+
self.count = count
|
|
16521
|
+
# The machine groups that meet the query conditions.
|
|
16522
|
+
self.machinegroups = machinegroups
|
|
16523
|
+
# The total number of machine groups that meet the query conditions.
|
|
16524
|
+
self.total = total
|
|
16525
|
+
|
|
16526
|
+
def validate(self):
|
|
16527
|
+
pass
|
|
16528
|
+
|
|
16529
|
+
def to_map(self):
|
|
16530
|
+
_map = super().to_map()
|
|
16531
|
+
if _map is not None:
|
|
16532
|
+
return _map
|
|
16533
|
+
|
|
16534
|
+
result = dict()
|
|
15215
16535
|
if self.count is not None:
|
|
15216
16536
|
result['count'] = self.count
|
|
16537
|
+
if self.machinegroups is not None:
|
|
16538
|
+
result['machinegroups'] = self.machinegroups
|
|
15217
16539
|
if self.total is not None:
|
|
15218
16540
|
result['total'] = self.total
|
|
15219
16541
|
return result
|
|
15220
16542
|
|
|
15221
16543
|
def from_map(self, m: dict = None):
|
|
15222
16544
|
m = m or dict()
|
|
15223
|
-
if m.get('configs') is not None:
|
|
15224
|
-
self.configs = m.get('configs')
|
|
15225
16545
|
if m.get('count') is not None:
|
|
15226
16546
|
self.count = m.get('count')
|
|
16547
|
+
if m.get('machinegroups') is not None:
|
|
16548
|
+
self.machinegroups = m.get('machinegroups')
|
|
15227
16549
|
if m.get('total') is not None:
|
|
15228
16550
|
self.total = m.get('total')
|
|
15229
16551
|
return self
|
|
15230
16552
|
|
|
15231
16553
|
|
|
15232
|
-
class
|
|
16554
|
+
class ListMachineGroupResponse(TeaModel):
|
|
15233
16555
|
def __init__(
|
|
15234
16556
|
self,
|
|
15235
16557
|
headers: Dict[str, str] = None,
|
|
15236
16558
|
status_code: int = None,
|
|
15237
|
-
body:
|
|
16559
|
+
body: ListMachineGroupResponseBody = None,
|
|
15238
16560
|
):
|
|
15239
16561
|
self.headers = headers
|
|
15240
16562
|
self.status_code = status_code
|
|
@@ -15265,23 +16587,20 @@ class ListLogtailPipelineConfigResponse(TeaModel):
|
|
|
15265
16587
|
if m.get('statusCode') is not None:
|
|
15266
16588
|
self.status_code = m.get('statusCode')
|
|
15267
16589
|
if m.get('body') is not None:
|
|
15268
|
-
temp_model =
|
|
16590
|
+
temp_model = ListMachineGroupResponseBody()
|
|
15269
16591
|
self.body = temp_model.from_map(m['body'])
|
|
15270
16592
|
return self
|
|
15271
16593
|
|
|
15272
16594
|
|
|
15273
|
-
class
|
|
16595
|
+
class ListMachinesRequest(TeaModel):
|
|
15274
16596
|
def __init__(
|
|
15275
16597
|
self,
|
|
15276
|
-
group_name: str = None,
|
|
15277
16598
|
offset: int = None,
|
|
15278
16599
|
size: int = None,
|
|
15279
16600
|
):
|
|
15280
|
-
# The name of the machine group. This parameter is used to filter machine groups. Partial match is supported.
|
|
15281
|
-
self.group_name = group_name
|
|
15282
16601
|
# The line from which the query starts. Default value: 0.
|
|
15283
16602
|
self.offset = offset
|
|
15284
|
-
# The number of entries per page. Maximum value: 500.
|
|
16603
|
+
# The number of entries per page. Default value: 100. Maximum value: 500.
|
|
15285
16604
|
self.size = size
|
|
15286
16605
|
|
|
15287
16606
|
def validate(self):
|
|
@@ -15293,8 +16612,6 @@ class ListMachineGroupRequest(TeaModel):
|
|
|
15293
16612
|
return _map
|
|
15294
16613
|
|
|
15295
16614
|
result = dict()
|
|
15296
|
-
if self.group_name is not None:
|
|
15297
|
-
result['groupName'] = self.group_name
|
|
15298
16615
|
if self.offset is not None:
|
|
15299
16616
|
result['offset'] = self.offset
|
|
15300
16617
|
if self.size is not None:
|
|
@@ -15303,8 +16620,6 @@ class ListMachineGroupRequest(TeaModel):
|
|
|
15303
16620
|
|
|
15304
16621
|
def from_map(self, m: dict = None):
|
|
15305
16622
|
m = m or dict()
|
|
15306
|
-
if m.get('groupName') is not None:
|
|
15307
|
-
self.group_name = m.get('groupName')
|
|
15308
16623
|
if m.get('offset') is not None:
|
|
15309
16624
|
self.offset = m.get('offset')
|
|
15310
16625
|
if m.get('size') is not None:
|
|
@@ -15312,22 +16627,25 @@ class ListMachineGroupRequest(TeaModel):
|
|
|
15312
16627
|
return self
|
|
15313
16628
|
|
|
15314
16629
|
|
|
15315
|
-
class
|
|
16630
|
+
class ListMachinesResponseBody(TeaModel):
|
|
15316
16631
|
def __init__(
|
|
15317
16632
|
self,
|
|
15318
16633
|
count: int = None,
|
|
15319
|
-
|
|
16634
|
+
machines: List[Machine] = None,
|
|
15320
16635
|
total: int = None,
|
|
15321
16636
|
):
|
|
15322
|
-
# The number of
|
|
16637
|
+
# The number of machines that are returned on the current page.
|
|
15323
16638
|
self.count = count
|
|
15324
|
-
# The
|
|
15325
|
-
self.
|
|
15326
|
-
# The total number of
|
|
16639
|
+
# The machines that are returned.
|
|
16640
|
+
self.machines = machines
|
|
16641
|
+
# The total number of machines.
|
|
15327
16642
|
self.total = total
|
|
15328
16643
|
|
|
15329
16644
|
def validate(self):
|
|
15330
|
-
|
|
16645
|
+
if self.machines:
|
|
16646
|
+
for k in self.machines:
|
|
16647
|
+
if k:
|
|
16648
|
+
k.validate()
|
|
15331
16649
|
|
|
15332
16650
|
def to_map(self):
|
|
15333
16651
|
_map = super().to_map()
|
|
@@ -15337,8 +16655,10 @@ class ListMachineGroupResponseBody(TeaModel):
|
|
|
15337
16655
|
result = dict()
|
|
15338
16656
|
if self.count is not None:
|
|
15339
16657
|
result['count'] = self.count
|
|
15340
|
-
|
|
15341
|
-
|
|
16658
|
+
result['machines'] = []
|
|
16659
|
+
if self.machines is not None:
|
|
16660
|
+
for k in self.machines:
|
|
16661
|
+
result['machines'].append(k.to_map() if k else None)
|
|
15342
16662
|
if self.total is not None:
|
|
15343
16663
|
result['total'] = self.total
|
|
15344
16664
|
return result
|
|
@@ -15347,19 +16667,22 @@ class ListMachineGroupResponseBody(TeaModel):
|
|
|
15347
16667
|
m = m or dict()
|
|
15348
16668
|
if m.get('count') is not None:
|
|
15349
16669
|
self.count = m.get('count')
|
|
15350
|
-
|
|
15351
|
-
|
|
16670
|
+
self.machines = []
|
|
16671
|
+
if m.get('machines') is not None:
|
|
16672
|
+
for k in m.get('machines'):
|
|
16673
|
+
temp_model = Machine()
|
|
16674
|
+
self.machines.append(temp_model.from_map(k))
|
|
15352
16675
|
if m.get('total') is not None:
|
|
15353
16676
|
self.total = m.get('total')
|
|
15354
16677
|
return self
|
|
15355
16678
|
|
|
15356
16679
|
|
|
15357
|
-
class
|
|
16680
|
+
class ListMachinesResponse(TeaModel):
|
|
15358
16681
|
def __init__(
|
|
15359
16682
|
self,
|
|
15360
16683
|
headers: Dict[str, str] = None,
|
|
15361
16684
|
status_code: int = None,
|
|
15362
|
-
body:
|
|
16685
|
+
body: ListMachinesResponseBody = None,
|
|
15363
16686
|
):
|
|
15364
16687
|
self.headers = headers
|
|
15365
16688
|
self.status_code = status_code
|
|
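With this change the response body carries typed Machine objects in its machines list, rebuilt one by one in from_map. A parsing sketch, not part of the diff; the 'ip' key inside the machine entry is an assumption, since the Machine model itself is not shown here.

```python
from alibabacloud_sls20201230 import models as sls_models

raw = {
    'count': 1,
    'machines': [{'ip': '192.0.2.10'}],  # assumed Machine key; model not in this diff
    'total': 1,
}
body = sls_models.ListMachinesResponseBody().from_map(raw)
print(body.count, body.total, len(body.machines))  # 1 1 1
```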
@@ -15390,20 +16713,21 @@ class ListMachineGroupResponse(TeaModel):
|
|
|
15390
16713
|
if m.get('statusCode') is not None:
|
|
15391
16714
|
self.status_code = m.get('statusCode')
|
|
15392
16715
|
if m.get('body') is not None:
|
|
15393
|
-
temp_model =
|
|
16716
|
+
temp_model = ListMachinesResponseBody()
|
|
15394
16717
|
self.body = temp_model.from_map(m['body'])
|
|
15395
16718
|
return self
|
|
15396
16719
|
|
|
15397
16720
|
|
|
15398
|
-
class
|
|
16721
|
+
class ListMaxComputeExportsRequest(TeaModel):
|
|
15399
16722
|
def __init__(
|
|
15400
16723
|
self,
|
|
16724
|
+
logstore: str = None,
|
|
15401
16725
|
offset: int = None,
|
|
15402
16726
|
size: int = None,
|
|
15403
16727
|
):
|
|
15404
|
-
|
|
16728
|
+
self.logstore = logstore
|
|
15405
16729
|
self.offset = offset
|
|
15406
|
-
# The number of entries
|
|
16730
|
+
# The number of entries to return. Default value: 10.
|
|
15407
16731
|
self.size = size
|
|
15408
16732
|
|
|
15409
16733
|
def validate(self):
|
|
@@ -15415,6 +16739,8 @@ class ListMachinesRequest(TeaModel):
|
|
|
15415
16739
|
return _map
|
|
15416
16740
|
|
|
15417
16741
|
result = dict()
|
|
16742
|
+
if self.logstore is not None:
|
|
16743
|
+
result['logstore'] = self.logstore
|
|
15418
16744
|
if self.offset is not None:
|
|
15419
16745
|
result['offset'] = self.offset
|
|
15420
16746
|
if self.size is not None:
|
|
@@ -15423,6 +16749,8 @@ class ListMachinesRequest(TeaModel):
|
|
|
15423
16749
|
|
|
15424
16750
|
def from_map(self, m: dict = None):
|
|
15425
16751
|
m = m or dict()
|
|
16752
|
+
if m.get('logstore') is not None:
|
|
16753
|
+
self.logstore = m.get('logstore')
|
|
15426
16754
|
if m.get('offset') is not None:
|
|
15427
16755
|
self.offset = m.get('offset')
|
|
15428
16756
|
if m.get('size') is not None:
|
|
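ListMaxComputeExportsRequest gains a logstore filter next to the existing offset/size paging parameters. A construction sketch, not part of the diff; the Logstore name is a placeholder.

```python
from alibabacloud_sls20201230 import models as sls_models

req = sls_models.ListMaxComputeExportsRequest(
    logstore='demo-logstore',  # placeholder name
    offset=0,
    size=10,
)
print(req.to_map())  # {'logstore': 'demo-logstore', 'offset': 0, 'size': 10}
```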
@@ -15430,23 +16758,20 @@ class ListMachinesRequest(TeaModel):
|
|
|
15430
16758
|
return self
|
|
15431
16759
|
|
|
15432
16760
|
|
|
15433
|
-
class
|
|
16761
|
+
class ListMaxComputeExportsResponseBody(TeaModel):
|
|
15434
16762
|
def __init__(
|
|
15435
16763
|
self,
|
|
15436
16764
|
count: int = None,
|
|
15437
|
-
|
|
16765
|
+
results: List[MaxComputeExport] = None,
|
|
15438
16766
|
total: int = None,
|
|
15439
16767
|
):
|
|
15440
|
-
# The number of machines that are returned on the current page.
|
|
15441
16768
|
self.count = count
|
|
15442
|
-
|
|
15443
|
-
self.machines = machines
|
|
15444
|
-
# The total number of machines.
|
|
16769
|
+
self.results = results
|
|
15445
16770
|
self.total = total
|
|
15446
16771
|
|
|
15447
16772
|
def validate(self):
|
|
15448
|
-
if self.
|
|
15449
|
-
for k in self.
|
|
16773
|
+
if self.results:
|
|
16774
|
+
for k in self.results:
|
|
15450
16775
|
if k:
|
|
15451
16776
|
k.validate()
|
|
15452
16777
|
|
|
@@ -15458,10 +16783,10 @@ class ListMachinesResponseBody(TeaModel):
|
|
|
15458
16783
|
result = dict()
|
|
15459
16784
|
if self.count is not None:
|
|
15460
16785
|
result['count'] = self.count
|
|
15461
|
-
result['
|
|
15462
|
-
if self.
|
|
15463
|
-
for k in self.
|
|
15464
|
-
result['
|
|
16786
|
+
result['results'] = []
|
|
16787
|
+
if self.results is not None:
|
|
16788
|
+
for k in self.results:
|
|
16789
|
+
result['results'].append(k.to_map() if k else None)
|
|
15465
16790
|
if self.total is not None:
|
|
15466
16791
|
result['total'] = self.total
|
|
15467
16792
|
return result
|
|
@@ -15470,22 +16795,22 @@ class ListMachinesResponseBody(TeaModel):
|
|
|
15470
16795
|
m = m or dict()
|
|
15471
16796
|
if m.get('count') is not None:
|
|
15472
16797
|
self.count = m.get('count')
|
|
15473
|
-
self.
|
|
15474
|
-
if m.get('
|
|
15475
|
-
for k in m.get('
|
|
15476
|
-
temp_model =
|
|
15477
|
-
self.
|
|
16798
|
+
self.results = []
|
|
16799
|
+
if m.get('results') is not None:
|
|
16800
|
+
for k in m.get('results'):
|
|
16801
|
+
temp_model = MaxComputeExport()
|
|
16802
|
+
self.results.append(temp_model.from_map(k))
|
|
15478
16803
|
if m.get('total') is not None:
|
|
15479
16804
|
self.total = m.get('total')
|
|
15480
16805
|
return self
|
|
15481
16806
|
|
|
15482
16807
|
|
|
15483
|
-
class
|
|
16808
|
+
class ListMaxComputeExportsResponse(TeaModel):
|
|
15484
16809
|
def __init__(
|
|
15485
16810
|
self,
|
|
15486
16811
|
headers: Dict[str, str] = None,
|
|
15487
16812
|
status_code: int = None,
|
|
15488
|
-
body:
|
|
16813
|
+
body: ListMaxComputeExportsResponseBody = None,
|
|
15489
16814
|
):
|
|
15490
16815
|
self.headers = headers
|
|
15491
16816
|
self.status_code = status_code
|
|
@@ -15516,7 +16841,7 @@ class ListMachinesResponse(TeaModel):
|
|
|
15516
16841
|
if m.get('statusCode') is not None:
|
|
15517
16842
|
self.status_code = m.get('statusCode')
|
|
15518
16843
|
if m.get('body') is not None:
|
|
15519
|
-
temp_model =
|
|
16844
|
+
temp_model = ListMaxComputeExportsResponseBody()
|
|
15520
16845
|
self.body = temp_model.from_map(m['body'])
|
|
15521
16846
|
return self
|
|
15522
16847
|
|
|
@@ -15662,6 +16987,7 @@ class ListOSSExportsRequest(TeaModel):
|
|
|
15662
16987
|
):
|
|
15663
16988
|
self.logstore = logstore
|
|
15664
16989
|
self.offset = offset
|
|
16990
|
+
# The number of entries to return. Default value: 10.
|
|
15665
16991
|
self.size = size
|
|
15666
16992
|
|
|
15667
16993
|
def validate(self):
|
|
@@ -15789,6 +17115,7 @@ class ListOSSHDFSExportsRequest(TeaModel):
|
|
|
15789
17115
|
):
|
|
15790
17116
|
self.logstore = logstore
|
|
15791
17117
|
self.offset = offset
|
|
17118
|
+
# The number of entries to return. Default value: 10.
|
|
15792
17119
|
self.size = size
|
|
15793
17120
|
|
|
15794
17121
|
def validate(self):
|
|
@@ -16318,6 +17645,7 @@ class ListScheduledSQLsRequest(TeaModel):
|
|
|
16318
17645
|
# The name of the Logstore.
|
|
16319
17646
|
self.logstore = logstore
|
|
16320
17647
|
self.offset = offset
|
|
17648
|
+
# The number of entries to return. Default value: 10.
|
|
16321
17649
|
self.size = size
|
|
16322
17650
|
|
|
16323
17651
|
def validate(self):
|
|
@@ -16684,8 +18012,8 @@ class ListTagResourcesRequest(TeaModel):
|
|
|
16684
18012
|
# * project
|
|
16685
18013
|
# * logstore
|
|
16686
18014
|
# * dashboard
|
|
16687
|
-
# *
|
|
16688
|
-
# *
|
|
18015
|
+
# * machinegroup
|
|
18016
|
+
# * logtailconfig
|
|
16689
18017
|
#
|
|
16690
18018
|
# This parameter is required.
|
|
16691
18019
|
self.resource_type = resource_type
|
|
@@ -16744,8 +18072,8 @@ class ListTagResourcesShrinkRequest(TeaModel):
|
|
|
16744
18072
|
# * project
|
|
16745
18073
|
# * logstore
|
|
16746
18074
|
# * dashboard
|
|
16747
|
-
# *
|
|
16748
|
-
# *
|
|
18075
|
+
# * machinegroup
|
|
18076
|
+
# * logtailconfig
|
|
16749
18077
|
#
|
|
16750
18078
|
# This parameter is required.
|
|
16751
18079
|
self.resource_type = resource_type
|
|
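The resourceType enumeration above now also accepts machinegroup and logtailconfig. A construction sketch, not part of the diff; the keyword follows the resource_type attribute shown above, and the request's other parameters (resource IDs, tags) are omitted because they are not part of this hunk.

```python
from alibabacloud_sls20201230 import models as sls_models

# List tags bound to machine groups; 'logtailconfig' is the other new value.
req = sls_models.ListTagResourcesRequest(resource_type='machinegroup')
```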
@@ -17194,6 +18522,87 @@ class PutAnnotationDataResponse(TeaModel):
|
|
|
17194
18522
|
return self
|
|
17195
18523
|
|
|
17196
18524
|
|
|
18525
|
+
class PutIngestProcessorRequest(TeaModel):
|
|
18526
|
+
def __init__(
|
|
18527
|
+
self,
|
|
18528
|
+
configuration: IngestProcessorConfiguration = None,
|
|
18529
|
+
description: str = None,
|
|
18530
|
+
display_name: str = None,
|
|
18531
|
+
):
|
|
18532
|
+
# The configuration of the ingest processor.
|
|
18533
|
+
#
|
|
18534
|
+
# This parameter is required.
|
|
18535
|
+
self.configuration = configuration
|
|
18536
|
+
# The description of the ingest processor.
|
|
18537
|
+
self.description = description
|
|
18538
|
+
# The display name of the ingest processor.
|
|
18539
|
+
#
|
|
18540
|
+
# This parameter is required.
|
|
18541
|
+
self.display_name = display_name
|
|
18542
|
+
|
|
18543
|
+
def validate(self):
|
|
18544
|
+
if self.configuration:
|
|
18545
|
+
self.configuration.validate()
|
|
18546
|
+
|
|
18547
|
+
def to_map(self):
|
|
18548
|
+
_map = super().to_map()
|
|
18549
|
+
if _map is not None:
|
|
18550
|
+
return _map
|
|
18551
|
+
|
|
18552
|
+
result = dict()
|
|
18553
|
+
if self.configuration is not None:
|
|
18554
|
+
result['configuration'] = self.configuration.to_map()
|
|
18555
|
+
if self.description is not None:
|
|
18556
|
+
result['description'] = self.description
|
|
18557
|
+
if self.display_name is not None:
|
|
18558
|
+
result['displayName'] = self.display_name
|
|
18559
|
+
return result
|
|
18560
|
+
|
|
18561
|
+
def from_map(self, m: dict = None):
|
|
18562
|
+
m = m or dict()
|
|
18563
|
+
if m.get('configuration') is not None:
|
|
18564
|
+
temp_model = IngestProcessorConfiguration()
|
|
18565
|
+
self.configuration = temp_model.from_map(m['configuration'])
|
|
18566
|
+
if m.get('description') is not None:
|
|
18567
|
+
self.description = m.get('description')
|
|
18568
|
+
if m.get('displayName') is not None:
|
|
18569
|
+
self.display_name = m.get('displayName')
|
|
18570
|
+
return self
|
|
18571
|
+
|
|
18572
|
+
|
|
18573
|
+
class PutIngestProcessorResponse(TeaModel):
|
|
18574
|
+
def __init__(
|
|
18575
|
+
self,
|
|
18576
|
+
headers: Dict[str, str] = None,
|
|
18577
|
+
status_code: int = None,
|
|
18578
|
+
):
|
|
18579
|
+
self.headers = headers
|
|
18580
|
+
self.status_code = status_code
|
|
18581
|
+
|
|
18582
|
+
def validate(self):
|
|
18583
|
+
pass
|
|
18584
|
+
|
|
18585
|
+
def to_map(self):
|
|
18586
|
+
_map = super().to_map()
|
|
18587
|
+
if _map is not None:
|
|
18588
|
+
return _map
|
|
18589
|
+
|
|
18590
|
+
result = dict()
|
|
18591
|
+
if self.headers is not None:
|
|
18592
|
+
result['headers'] = self.headers
|
|
18593
|
+
if self.status_code is not None:
|
|
18594
|
+
result['statusCode'] = self.status_code
|
|
18595
|
+
return result
|
|
18596
|
+
|
|
18597
|
+
def from_map(self, m: dict = None):
|
|
18598
|
+
m = m or dict()
|
|
18599
|
+
if m.get('headers') is not None:
|
|
18600
|
+
self.headers = m.get('headers')
|
|
18601
|
+
if m.get('statusCode') is not None:
|
|
18602
|
+
self.status_code = m.get('statusCode')
|
|
18603
|
+
return self
|
|
18604
|
+
|
|
18605
|
+
|
|
17197
18606
|
class PutLogsHeaders(TeaModel):
|
|
17198
18607
|
def __init__(
|
|
17199
18608
|
self,
|
|
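PutIngestProcessorRequest requires configuration and displayName, as the comments above state. A construction sketch, not part of the diff; IngestProcessorConfiguration is created empty here because its fields are not shown, and the description and display name are placeholders.

```python
from alibabacloud_sls20201230 import models as sls_models

req = sls_models.PutIngestProcessorRequest(
    configuration=sls_models.IngestProcessorConfiguration(),  # fields not in this diff
    description='strip debug fields before indexing',
    display_name='demo-processor',
)
print(req.to_map())  # {'configuration': {}, 'description': ..., 'displayName': ...}
```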
@@ -17509,6 +18918,9 @@ class RefreshTokenRequest(TeaModel):
|
|
|
17509
18918
|
access_token_expiration_time: int = None,
|
|
17510
18919
|
ticket: str = None,
|
|
17511
18920
|
):
|
|
18921
|
+
# * The validity period of the access token. Unit: seconds. Default value: 86400, which specifies one day. Valid values: 0 to 86400.
|
|
18922
|
+
# * The validity period of the access token is the smaller value between accessTokenExpirationTime and expirationTime.
|
|
18923
|
+
# * If you use a Security Token Service (STS) token to call this operation, the validity period of the access token is the smallest value among accessTokenExpirationTime, expirationTime, and the validity period of the STS token.
|
|
17512
18924
|
self.access_token_expiration_time = access_token_expiration_time
|
|
17513
18925
|
# The ticket that is used for logon-free access.
|
|
17514
18926
|
self.ticket = ticket
|
|
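The new comment explains that accessTokenExpirationTime caps the token lifetime together with expirationTime and, when an STS token is used, the STS token lifetime. A construction sketch, not part of the diff; the ticket string is a placeholder.

```python
from alibabacloud_sls20201230 import models as sls_models

# Request a one-hour access token for logon-free access.
req = sls_models.RefreshTokenRequest(
    access_token_expiration_time=3600,
    ticket='<logon-free ticket>',  # placeholder
)
```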
@@ -17673,22 +19085,97 @@ class SplitShardRequest(TeaModel):
|
|
|
17673
19085
|
return self
|
|
17674
19086
|
|
|
17675
19087
|
|
|
17676
|
-
class SplitShardResponse(TeaModel):
|
|
19088
|
+
class SplitShardResponse(TeaModel):
|
|
19089
|
+
def __init__(
|
|
19090
|
+
self,
|
|
19091
|
+
headers: Dict[str, str] = None,
|
|
19092
|
+
status_code: int = None,
|
|
19093
|
+
body: List[Shard] = None,
|
|
19094
|
+
):
|
|
19095
|
+
self.headers = headers
|
|
19096
|
+
self.status_code = status_code
|
|
19097
|
+
self.body = body
|
|
19098
|
+
|
|
19099
|
+
def validate(self):
|
|
19100
|
+
if self.body:
|
|
19101
|
+
for k in self.body:
|
|
19102
|
+
if k:
|
|
19103
|
+
k.validate()
|
|
19104
|
+
|
|
19105
|
+
def to_map(self):
|
|
19106
|
+
_map = super().to_map()
|
|
19107
|
+
if _map is not None:
|
|
19108
|
+
return _map
|
|
19109
|
+
|
|
19110
|
+
result = dict()
|
|
19111
|
+
if self.headers is not None:
|
|
19112
|
+
result['headers'] = self.headers
|
|
19113
|
+
if self.status_code is not None:
|
|
19114
|
+
result['statusCode'] = self.status_code
|
|
19115
|
+
result['body'] = []
|
|
19116
|
+
if self.body is not None:
|
|
19117
|
+
for k in self.body:
|
|
19118
|
+
result['body'].append(k.to_map() if k else None)
|
|
19119
|
+
return result
|
|
19120
|
+
|
|
19121
|
+
def from_map(self, m: dict = None):
|
|
19122
|
+
m = m or dict()
|
|
19123
|
+
if m.get('headers') is not None:
|
|
19124
|
+
self.headers = m.get('headers')
|
|
19125
|
+
if m.get('statusCode') is not None:
|
|
19126
|
+
self.status_code = m.get('statusCode')
|
|
19127
|
+
self.body = []
|
|
19128
|
+
if m.get('body') is not None:
|
|
19129
|
+
for k in m.get('body'):
|
|
19130
|
+
temp_model = Shard()
|
|
19131
|
+
self.body.append(temp_model.from_map(k))
|
|
19132
|
+
return self
|
|
19133
|
+
|
|
19134
|
+
|
|
19135
|
+
class StartETLResponse(TeaModel):
|
|
19136
|
+
def __init__(
|
|
19137
|
+
self,
|
|
19138
|
+
headers: Dict[str, str] = None,
|
|
19139
|
+
status_code: int = None,
|
|
19140
|
+
):
|
|
19141
|
+
self.headers = headers
|
|
19142
|
+
self.status_code = status_code
|
|
19143
|
+
|
|
19144
|
+
def validate(self):
|
|
19145
|
+
pass
|
|
19146
|
+
|
|
19147
|
+
def to_map(self):
|
|
19148
|
+
_map = super().to_map()
|
|
19149
|
+
if _map is not None:
|
|
19150
|
+
return _map
|
|
19151
|
+
|
|
19152
|
+
result = dict()
|
|
19153
|
+
if self.headers is not None:
|
|
19154
|
+
result['headers'] = self.headers
|
|
19155
|
+
if self.status_code is not None:
|
|
19156
|
+
result['statusCode'] = self.status_code
|
|
19157
|
+
return result
|
|
19158
|
+
|
|
19159
|
+
def from_map(self, m: dict = None):
|
|
19160
|
+
m = m or dict()
|
|
19161
|
+
if m.get('headers') is not None:
|
|
19162
|
+
self.headers = m.get('headers')
|
|
19163
|
+
if m.get('statusCode') is not None:
|
|
19164
|
+
self.status_code = m.get('statusCode')
|
|
19165
|
+
return self
|
|
19166
|
+
|
|
19167
|
+
|
|
19168
|
+
class StartMaxComputeExportResponse(TeaModel):
|
|
17677
19169
|
def __init__(
|
|
17678
19170
|
self,
|
|
17679
19171
|
headers: Dict[str, str] = None,
|
|
17680
19172
|
status_code: int = None,
|
|
17681
|
-
body: List[Shard] = None,
|
|
17682
19173
|
):
|
|
17683
19174
|
self.headers = headers
|
|
17684
19175
|
self.status_code = status_code
|
|
17685
|
-
self.body = body
|
|
17686
19176
|
|
|
17687
19177
|
def validate(self):
|
|
17688
|
-
|
|
17689
|
-
for k in self.body:
|
|
17690
|
-
if k:
|
|
17691
|
-
k.validate()
|
|
19178
|
+
pass
|
|
17692
19179
|
|
|
17693
19180
|
def to_map(self):
|
|
17694
19181
|
_map = super().to_map()
|
|
@@ -17700,10 +19187,6 @@ class SplitShardResponse(TeaModel):
|
|
|
17700
19187
|
result['headers'] = self.headers
|
|
17701
19188
|
if self.status_code is not None:
|
|
17702
19189
|
result['statusCode'] = self.status_code
|
|
17703
|
-
result['body'] = []
|
|
17704
|
-
if self.body is not None:
|
|
17705
|
-
for k in self.body:
|
|
17706
|
-
result['body'].append(k.to_map() if k else None)
|
|
17707
19190
|
return result
|
|
17708
19191
|
|
|
17709
19192
|
def from_map(self, m: dict = None):
|
|
@@ -17712,15 +19195,10 @@ class SplitShardResponse(TeaModel):
|
|
|
17712
19195
|
self.headers = m.get('headers')
|
|
17713
19196
|
if m.get('statusCode') is not None:
|
|
17714
19197
|
self.status_code = m.get('statusCode')
|
|
17715
|
-
self.body = []
|
|
17716
|
-
if m.get('body') is not None:
|
|
17717
|
-
for k in m.get('body'):
|
|
17718
|
-
temp_model = Shard()
|
|
17719
|
-
self.body.append(temp_model.from_map(k))
|
|
17720
19198
|
return self
|
|
17721
19199
|
|
|
17722
19200
|
|
|
17723
|
-
class
|
|
19201
|
+
class StartOSSExportResponse(TeaModel):
|
|
17724
19202
|
def __init__(
|
|
17725
19203
|
self,
|
|
17726
19204
|
headers: Dict[str, str] = None,
|
|
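The hunk above gives SplitShardResponse a typed body, a list of Shard models, while StartETLResponse keeps the body-less headers/statusCode shape. A parsing sketch, not part of the diff; empty dicts stand in for the shard payloads because the Shard model is not shown here.

```python
from alibabacloud_sls20201230 import models as sls_models

raw = {
    'headers': {},
    'statusCode': 200,
    'body': [{}, {}],  # two shards; Shard fields are not part of this diff
}
resp = sls_models.SplitShardResponse().from_map(raw)
print(len(resp.body))  # 2
```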
@@ -17753,7 +19231,7 @@ class StartETLResponse(TeaModel):
|
|
|
17753
19231
|
return self
|
|
17754
19232
|
|
|
17755
19233
|
|
|
17756
|
-
class
|
|
19234
|
+
class StartOSSHDFSExportResponse(TeaModel):
|
|
17757
19235
|
def __init__(
|
|
17758
19236
|
self,
|
|
17759
19237
|
headers: Dict[str, str] = None,
|
|
@@ -17786,7 +19264,7 @@ class StartOSSExportResponse(TeaModel):
|
|
|
17786
19264
|
return self
|
|
17787
19265
|
|
|
17788
19266
|
|
|
17789
|
-
class
|
|
19267
|
+
class StartOSSIngestionResponse(TeaModel):
|
|
17790
19268
|
def __init__(
|
|
17791
19269
|
self,
|
|
17792
19270
|
headers: Dict[str, str] = None,
|
|
@@ -17819,7 +19297,7 @@ class StartOSSHDFSExportResponse(TeaModel):
|
|
|
17819
19297
|
return self
|
|
17820
19298
|
|
|
17821
19299
|
|
|
17822
|
-
class
|
|
19300
|
+
class StopETLResponse(TeaModel):
|
|
17823
19301
|
def __init__(
|
|
17824
19302
|
self,
|
|
17825
19303
|
headers: Dict[str, str] = None,
|
|
@@ -17852,7 +19330,7 @@ class StartOSSIngestionResponse(TeaModel):
|
|
|
17852
19330
|
return self
|
|
17853
19331
|
|
|
17854
19332
|
|
|
17855
|
-
class
|
|
19333
|
+
class StopMaxComputeExportResponse(TeaModel):
|
|
17856
19334
|
def __init__(
|
|
17857
19335
|
self,
|
|
17858
19336
|
headers: Dict[str, str] = None,
|
|
@@ -18585,7 +20063,7 @@ class UpdateConsumerGroupRequest(TeaModel):
|
|
|
18585
20063
|
# * true: If a shard is split, the data in the original shard is consumed first. Then, the data in the new shards is consumed at the same time. If shards are merged, the data in the original shards is consumed first. Then, the data in the new shard is consumed.
|
|
18586
20064
|
# * false: The data in all shards is consumed at the same time. If a new shard is generated after a shard is split or shards are merged, the data in the new shard is immediately consumed.
|
|
18587
20065
|
self.order = order
|
|
18588
|
-
# The timeout period. If Simple Log Service does not receive heartbeats from a consumer within the timeout period, Simple Log Service deletes the consumer. Unit: seconds
|
|
20066
|
+
# The timeout period. If Simple Log Service does not receive heartbeats from a consumer within the timeout period, Simple Log Service deletes the consumer. Unit: seconds
|
|
18589
20067
|
self.timeout = timeout
|
|
18590
20068
|
|
|
18591
20069
|
def validate(self):
|
|
@@ -19057,8 +20535,11 @@ class UpdateLogStoreEncryptionRequestUserCmkInfo(TeaModel):
|
|
|
19057
20535
|
region_id: str = None,
|
|
19058
20536
|
role_arn: str = None,
|
|
19059
20537
|
):
|
|
20538
|
+
# The ID of the CMK to which the BYOK key belongs. You can create a CMK in KMS. The CMK must be in the same region as the endpoint of Simple Log Service.
|
|
19060
20539
|
self.key_id = key_id
|
|
20540
|
+
# The region ID. Example: cn-hangzhou.
|
|
19061
20541
|
self.region_id = region_id
|
|
20542
|
+
# The Alibaba Cloud Resource Name (ARN) of the Resource Access Management (RAM) role.The value is in the acs:ram::12344\\*\\*\\*:role/xxxxx format. To use a BYOK key to encrypt logs, you must create a RAM role and grant the AliyunKMSReadOnlyAccess and AliyunKMSCryptoUserAccess permissions to the RAM role. You must grant the API caller the PassRole permission on the RAM role.
|
|
19062
20543
|
self.role_arn = role_arn
|
|
19063
20544
|
|
|
19064
20545
|
def validate(self):
|
|
@@ -19096,9 +20577,13 @@ class UpdateLogStoreEncryptionRequest(TeaModel):
|
|
|
19096
20577
|
encrypt_type: str = None,
|
|
19097
20578
|
user_cmk_info: UpdateLogStoreEncryptionRequestUserCmkInfo = None,
|
|
19098
20579
|
):
|
|
20580
|
+
# Specifies whether to enable the encryption feature. After you update the encryption configuration of the Logstore, you can modify only the enable parameter in subsequent update requests. You cannot modify the encryptType or userCmkInfo parameters.
|
|
20581
|
+
#
|
|
19099
20582
|
# This parameter is required.
|
|
19100
20583
|
self.enable = enable
|
|
20584
|
+
# The encryption algorithm. Valid values: default, m4, sm4_ecb, sm4_cbc, sm4_gcm, aes_ecb, aes_cbc, aes_cfb, aes_ofb, and aes_gcm.
|
|
19101
20585
|
self.encrypt_type = encrypt_type
|
|
20586
|
+
# Optional. If you use a BYOK key to encrypt logs, you must specify this parameter. If you use the service key of Simple Log Service to encrypt logs, you do not need to specify this parameter.
|
|
19102
20587
|
self.user_cmk_info = user_cmk_info
|
|
19103
20588
|
|
|
19104
20589
|
def validate(self):
|
|
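The new comments above describe the BYOK path: enable is always required, encryptType picks the algorithm, and userCmkInfo is only needed for a customer-managed key. A construction sketch, not part of the diff; the key ID, region, and role ARN are placeholders.

```python
from alibabacloud_sls20201230 import models as sls_models

cmk = sls_models.UpdateLogStoreEncryptionRequestUserCmkInfo(
    key_id='<your CMK ID>',                                     # placeholder
    region_id='cn-hangzhou',
    role_arn='acs:ram::123456789012:role/sls-encryption-role',  # placeholder ARN
)
req = sls_models.UpdateLogStoreEncryptionRequest(
    enable=True,
    encrypt_type='sm4_gcm',   # one of the algorithms listed above
    user_cmk_info=cmk,        # omit this when the service key is used
)
```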
@@ -19227,6 +20712,69 @@ class UpdateLogStoreMeteringModeResponse(TeaModel):
|
|
|
19227
20712
|
return self
|
|
19228
20713
|
|
|
19229
20714
|
|
|
20715
|
+
class UpdateLogStoreProcessorRequest(TeaModel):
|
|
20716
|
+
def __init__(
|
|
20717
|
+
self,
|
|
20718
|
+
processor_name: str = None,
|
|
20719
|
+
):
|
|
20720
|
+
# The identifier of the ingest processor.
|
|
20721
|
+
#
|
|
20722
|
+
# This parameter is required.
|
|
20723
|
+
self.processor_name = processor_name
|
|
20724
|
+
|
|
20725
|
+
def validate(self):
|
|
20726
|
+
pass
|
|
20727
|
+
|
|
20728
|
+
def to_map(self):
|
|
20729
|
+
_map = super().to_map()
|
|
20730
|
+
if _map is not None:
|
|
20731
|
+
return _map
|
|
20732
|
+
|
|
20733
|
+
result = dict()
|
|
20734
|
+
if self.processor_name is not None:
|
|
20735
|
+
result['processorName'] = self.processor_name
|
|
20736
|
+
return result
|
|
20737
|
+
|
|
20738
|
+
def from_map(self, m: dict = None):
|
|
20739
|
+
m = m or dict()
|
|
20740
|
+
if m.get('processorName') is not None:
|
|
20741
|
+
self.processor_name = m.get('processorName')
|
|
20742
|
+
return self
|
|
20743
|
+
|
|
20744
|
+
|
|
20745
|
+
class UpdateLogStoreProcessorResponse(TeaModel):
|
|
20746
|
+
def __init__(
|
|
20747
|
+
self,
|
|
20748
|
+
headers: Dict[str, str] = None,
|
|
20749
|
+
status_code: int = None,
|
|
20750
|
+
):
|
|
20751
|
+
self.headers = headers
|
|
20752
|
+
self.status_code = status_code
|
|
20753
|
+
|
|
20754
|
+
def validate(self):
|
|
20755
|
+
pass
|
|
20756
|
+
|
|
20757
|
+
def to_map(self):
|
|
20758
|
+
_map = super().to_map()
|
|
20759
|
+
if _map is not None:
|
|
20760
|
+
return _map
|
|
20761
|
+
|
|
20762
|
+
result = dict()
|
|
20763
|
+
if self.headers is not None:
|
|
20764
|
+
result['headers'] = self.headers
|
|
20765
|
+
if self.status_code is not None:
|
|
20766
|
+
result['statusCode'] = self.status_code
|
|
20767
|
+
return result
|
|
20768
|
+
|
|
20769
|
+
def from_map(self, m: dict = None):
|
|
20770
|
+
m = m or dict()
|
|
20771
|
+
if m.get('headers') is not None:
|
|
20772
|
+
self.headers = m.get('headers')
|
|
20773
|
+
if m.get('statusCode') is not None:
|
|
20774
|
+
self.status_code = m.get('statusCode')
|
|
20775
|
+
return self
|
|
20776
|
+
|
|
20777
|
+
|
|
19230
20778
|
class UpdateLoggingRequestLoggingDetails(TeaModel):
|
|
19231
20779
|
def __init__(
|
|
19232
20780
|
self,
|
|
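UpdateLogStoreProcessorRequest carries only the processor identifier that binds an ingest processor to a Logstore. A construction sketch, not part of the diff; the identifier is a placeholder.

```python
from alibabacloud_sls20201230 import models as sls_models

req = sls_models.UpdateLogStoreProcessorRequest(processor_name='demo-processor')
print(req.to_map())  # {'processorName': 'demo-processor'}
```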
@@ -19378,15 +20926,11 @@ class UpdateLogtailPipelineConfigRequest(TeaModel):
|
|
|
19378
20926
|
self.config_name = config_name
|
|
19379
20927
|
# The output plug-ins.
|
|
19380
20928
|
#
|
|
19381
|
-
# > You can
|
|
20929
|
+
# > You can configure only one output plug-in.
|
|
19382
20930
|
#
|
|
19383
20931
|
# This parameter is required.
|
|
19384
20932
|
self.flushers = flushers
|
|
19385
20933
|
# The global settings.
|
|
19386
|
-
#
|
|
19387
|
-
# **\
|
|
19388
|
-
#
|
|
19389
|
-
# ****\
|
|
19390
20934
|
self.global_ = global_
|
|
19391
20935
|
# The input plug-ins.
|
|
19392
20936
|
#
|
|
@@ -19398,13 +20942,13 @@ class UpdateLogtailPipelineConfigRequest(TeaModel):
|
|
|
19398
20942
|
self.log_sample = log_sample
|
|
19399
20943
|
# The processing plug-ins.
|
|
19400
20944
|
#
|
|
19401
|
-
# > Logtail
|
|
20945
|
+
# > Logtail plug-ins for data processing are classified into native plug-ins and extended plug-ins. For more information, see [Overview of Logtail plug-ins for data processing](https://help.aliyun.com/document_detail/64957.html).
|
|
19402
20946
|
#
|
|
19403
20947
|
# >
|
|
19404
20948
|
#
|
|
19405
20949
|
# * You can use native plug-ins only to collect text logs.
|
|
19406
20950
|
#
|
|
19407
|
-
# * You cannot add native plug-ins and extended plug-ins at
|
|
20951
|
+
# * You cannot add native plug-ins and extended plug-ins at the same time.
|
|
19408
20952
|
#
|
|
19409
20953
|
# * When you add native plug-ins, take note of the following items:
|
|
19410
20954
|
#
|
|
@@ -19697,16 +21241,101 @@ class UpdateMachineGroupMachineResponse(TeaModel):
|
|
|
19697
21241
|
return self
|
|
19698
21242
|
|
|
19699
21243
|
|
|
21244
|
+
class UpdateMaxComputeExportRequest(TeaModel):
|
|
21245
|
+
def __init__(
|
|
21246
|
+
self,
|
|
21247
|
+
configuration: MaxComputeExportConfiguration = None,
|
|
21248
|
+
description: str = None,
|
|
21249
|
+
display_name: str = None,
|
|
21250
|
+
):
|
|
21251
|
+
# The setting of the MaxCompute data shipping job.
|
|
21252
|
+
#
|
|
21253
|
+
# This parameter is required.
|
|
21254
|
+
self.configuration = configuration
|
|
21255
|
+
# The description of the MaxCompute data shipping job.
|
|
21256
|
+
self.description = description
|
|
21257
|
+
# The display name of the MaxCompute data shipping job.
|
|
21258
|
+
#
|
|
21259
|
+
# This parameter is required.
|
|
21260
|
+
self.display_name = display_name
|
|
21261
|
+
|
|
21262
|
+
def validate(self):
|
|
21263
|
+
if self.configuration:
|
|
21264
|
+
self.configuration.validate()
|
|
21265
|
+
|
|
21266
|
+
def to_map(self):
|
|
21267
|
+
_map = super().to_map()
|
|
21268
|
+
if _map is not None:
|
|
21269
|
+
return _map
|
|
21270
|
+
|
|
21271
|
+
result = dict()
|
|
21272
|
+
if self.configuration is not None:
|
|
21273
|
+
result['configuration'] = self.configuration.to_map()
|
|
21274
|
+
if self.description is not None:
|
|
21275
|
+
result['description'] = self.description
|
|
21276
|
+
if self.display_name is not None:
|
|
21277
|
+
result['displayName'] = self.display_name
|
|
21278
|
+
return result
|
|
21279
|
+
|
|
21280
|
+
def from_map(self, m: dict = None):
|
|
21281
|
+
m = m or dict()
|
|
21282
|
+
if m.get('configuration') is not None:
|
|
21283
|
+
temp_model = MaxComputeExportConfiguration()
|
|
21284
|
+
self.configuration = temp_model.from_map(m['configuration'])
|
|
21285
|
+
if m.get('description') is not None:
|
|
21286
|
+
self.description = m.get('description')
|
|
21287
|
+
if m.get('displayName') is not None:
|
|
21288
|
+
self.display_name = m.get('displayName')
|
|
21289
|
+
return self
|
|
21290
|
+
|
|
21291
|
+
|
|
21292
|
+
class UpdateMaxComputeExportResponse(TeaModel):
|
|
21293
|
+
def __init__(
|
|
21294
|
+
self,
|
|
21295
|
+
headers: Dict[str, str] = None,
|
|
21296
|
+
status_code: int = None,
|
|
21297
|
+
):
|
|
21298
|
+
self.headers = headers
|
|
21299
|
+
self.status_code = status_code
|
|
21300
|
+
|
|
21301
|
+
def validate(self):
|
|
21302
|
+
pass
|
|
21303
|
+
|
|
21304
|
+
def to_map(self):
|
|
21305
|
+
_map = super().to_map()
|
|
21306
|
+
if _map is not None:
|
|
21307
|
+
return _map
|
|
21308
|
+
|
|
21309
|
+
result = dict()
|
|
21310
|
+
if self.headers is not None:
|
|
21311
|
+
result['headers'] = self.headers
|
|
21312
|
+
if self.status_code is not None:
|
|
21313
|
+
result['statusCode'] = self.status_code
|
|
21314
|
+
return result
|
|
21315
|
+
|
|
21316
|
+
def from_map(self, m: dict = None):
|
|
21317
|
+
m = m or dict()
|
|
21318
|
+
if m.get('headers') is not None:
|
|
21319
|
+
self.headers = m.get('headers')
|
|
21320
|
+
if m.get('statusCode') is not None:
|
|
21321
|
+
self.status_code = m.get('statusCode')
|
|
21322
|
+
return self
|
|
21323
|
+
|
|
21324
|
+
|
|
19700
21325
|
class UpdateMetricStoreRequest(TeaModel):
|
|
19701
21326
|
def __init__(
|
|
19702
21327
|
self,
|
|
19703
21328
|
auto_split: bool = None,
|
|
21329
|
+
hot_ttl: int = None,
|
|
21330
|
+
infrequent_access_ttl: int = None,
|
|
19704
21331
|
max_split_shard: int = None,
|
|
19705
21332
|
mode: str = None,
|
|
19706
21333
|
ttl: int = None,
|
|
19707
21334
|
):
|
|
19708
21335
|
# Specifies whether to enable automatic sharding.
|
|
19709
21336
|
self.auto_split = auto_split
|
|
21337
|
+
self.hot_ttl = hot_ttl
|
|
21338
|
+
self.infrequent_access_ttl = infrequent_access_ttl
|
|
19710
21339
|
# The maximum number of shards into which existing shards can be automatically split. This parameter is valid only when you set the autoSplit parameter to true.
|
|
19711
21340
|
self.max_split_shard = max_split_shard
|
|
19712
21341
|
# The type of the Metricstore.
|
|
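UpdateMaxComputeExportRequest takes the full job definition: configuration and displayName are required per the comments above, and description is optional. A construction sketch, not part of the diff; MaxComputeExportConfiguration is created empty because its fields are not shown here, and the names are placeholders.

```python
from alibabacloud_sls20201230 import models as sls_models

req = sls_models.UpdateMaxComputeExportRequest(
    configuration=sls_models.MaxComputeExportConfiguration(),  # fields not in this diff
    description='ship logs to MaxCompute',
    display_name='demo-maxcompute-export',
)
```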
@@ -19725,6 +21354,10 @@ class UpdateMetricStoreRequest(TeaModel):
|
|
|
19725
21354
|
result = dict()
|
|
19726
21355
|
if self.auto_split is not None:
|
|
19727
21356
|
result['autoSplit'] = self.auto_split
|
|
21357
|
+
if self.hot_ttl is not None:
|
|
21358
|
+
result['hot_ttl'] = self.hot_ttl
|
|
21359
|
+
if self.infrequent_access_ttl is not None:
|
|
21360
|
+
result['infrequentAccessTTL'] = self.infrequent_access_ttl
|
|
19728
21361
|
if self.max_split_shard is not None:
|
|
19729
21362
|
result['maxSplitShard'] = self.max_split_shard
|
|
19730
21363
|
if self.mode is not None:
|
|
@@ -19737,6 +21370,10 @@ class UpdateMetricStoreRequest(TeaModel):
|
|
|
19737
21370
|
m = m or dict()
|
|
19738
21371
|
if m.get('autoSplit') is not None:
|
|
19739
21372
|
self.auto_split = m.get('autoSplit')
|
|
21373
|
+
if m.get('hot_ttl') is not None:
|
|
21374
|
+
self.hot_ttl = m.get('hot_ttl')
|
|
21375
|
+
if m.get('infrequentAccessTTL') is not None:
|
|
21376
|
+
self.infrequent_access_ttl = m.get('infrequentAccessTTL')
|
|
19740
21377
|
if m.get('maxSplitShard') is not None:
|
|
19741
21378
|
self.max_split_shard = m.get('maxSplitShard')
|
|
19742
21379
|
if m.get('mode') is not None:
|
|
@@ -19840,6 +21477,69 @@ class UpdateMetricStoreMeteringModeResponse(TeaModel):
         return self


+class UpdateMetricStoreProcessorRequest(TeaModel):
+    def __init__(
+        self,
+        processor_name: str = None,
+    ):
+        # The identifier of the ingest processor.
+        #
+        # This parameter is required.
+        self.processor_name = processor_name
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.processor_name is not None:
+            result['processorName'] = self.processor_name
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('processorName') is not None:
+            self.processor_name = m.get('processorName')
+        return self
+
+
+class UpdateMetricStoreProcessorResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
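UpdateMetricStoreProcessorRequest and UpdateMetricStoreProcessorResponse are new in 5.7.0 and follow the same TeaModel pattern as the rest of the file. A minimal sketch of the request's serialization round trip, using only the model code shown above; the processor name is a placeholder and the client-side call that consumes the request is not part of this excerpt.

    from alibabacloud_sls20201230 import models as sls_models

    # processorName is the only body field and is marked as required.
    req = sls_models.UpdateMetricStoreProcessorRequest(processor_name='my-ingest-processor')
    print(req.to_map())
    # expected: {'processorName': 'my-ingest-processor'}

    # from_map() restores the model from the same wire form.
    restored = sls_models.UpdateMetricStoreProcessorRequest().from_map(
        {'processorName': 'my-ingest-processor'}
    )
    assert restored.processor_name == 'my-ingest-processor'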
 class UpdateOSSExportRequest(TeaModel):
     def __init__(
         self,
@@ -19925,10 +21625,14 @@ class UpdateOSSHDFSExportRequest(TeaModel):
         display_name: str = None,
     ):
         # The configuration details of the job.
+        #
+        # This parameter is required.
         self.configuration = configuration
         # The description of the job.
         self.description = description
         # The display name of the job.
+        #
+        # This parameter is required.
         self.display_name = display_name

     def validate(self):
@@ -20294,11 +21998,13 @@ class UpdateProjectRequest(TeaModel):
     def __init__(
         self,
         description: str = None,
+        recycle_bin_enabled: bool = None,
     ):
         # The description of the project. The default value is an empty string.
         #
         # This parameter is required.
         self.description = description
+        self.recycle_bin_enabled = recycle_bin_enabled

     def validate(self):
         pass
@@ -20311,12 +22017,16 @@ class UpdateProjectRequest(TeaModel):
         result = dict()
         if self.description is not None:
             result['description'] = self.description
+        if self.recycle_bin_enabled is not None:
+            result['recycleBinEnabled'] = self.recycle_bin_enabled
         return result

     def from_map(self, m: dict = None):
         m = m or dict()
         if m.get('description') is not None:
             self.description = m.get('description')
+        if m.get('recycleBinEnabled') is not None:
+            self.recycle_bin_enabled = m.get('recycleBinEnabled')
         return self


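UpdateProjectRequest now carries an optional recycleBinEnabled flag next to the required description. A minimal sketch of the request body this produces; the project being updated is identified outside this body model, and the values are illustrative.

    from alibabacloud_sls20201230 import models as sls_models

    req = sls_models.UpdateProjectRequest(
        description='demo project',   # required, per the generated docstring
        recycle_bin_enabled=True,     # new in 5.7.0, serialized as 'recycleBinEnabled'
    )
    print(req.to_map())
    # expected: {'description': 'demo project', 'recycleBinEnabled': True}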
@@ -20889,9 +22599,13 @@ class UpsertCollectionPolicyRequestCentralizeConfig(TeaModel):
         dest_region: str = None,
         dest_ttl: int = None,
     ):
+        # The destination logstore for centralized storage. Make sure that the region of the destination logstore is consistent with the region specified by destRegion and the destination logstore belongs to the destination project specified by destProject.
         self.dest_logstore = dest_logstore
+        # The destination project for centralized storage. Make sure that the region of the destination project is consistent with the region specified by destRegion.
         self.dest_project = dest_project
+        # The destination region for centralized storage.
         self.dest_region = dest_region
+        # The data retention period for centralized storage. Unit: days. This parameter takes effect only when you use an existing logstore for centralized storage.
         self.dest_ttl = dest_ttl

     def validate(self):
@@ -20931,6 +22645,7 @@ class UpsertCollectionPolicyRequestDataConfig(TeaModel):
         self,
         data_region: str = None,
     ):
+        # The region for storing the global logs that are collected for the first time.
         self.data_region = data_region

     def validate(self):
@@ -20961,10 +22676,15 @@ class UpsertCollectionPolicyRequestPolicyConfig(TeaModel):
         resource_mode: str = None,
         resource_tags: Dict[str, Any] = None,
     ):
+        # The IDs of the instances. This parameter takes effect only when resourceMode is set to instanceMode. Logs are collected only from instances that use the specified IDs.
         self.instance_ids = instance_ids
+        # The regions of the instances. This parameter takes effect only when resourceMode is set to attributeMode. Wildcard characters are supported. If you leave this parameter empty, region-based filtering is not performed. The system considers that all instances are matched. If you specify a value for this parameter, logs of instances that reside in the specified regions are collected. Logs are collected from an instance only if the resource tags and region of the instance match the specified conditions.
         self.regions = regions
+        # The resource collection mode. Valid values: all, attributeMode, and instanceMode. The value all specifies that logs of all instances within your account are collected to the default logstore. The value attributeMode specifies that logs are collected based on the regions of instances and resource tags. The value instanceMode specifies that logs are collected based on instance IDs.
+        #
         # This parameter is required.
         self.resource_mode = resource_mode
+        # The resource tags. This parameter takes effect only when resourceMode is set to attributeMode. If you leave this parameter empty, resource tag-based filtering is not performed. The system considers that all instances are matched. If you specify a value for this parameter, logs of instances that use the specified resource tags are collected. Logs are collected from an instance only if the resource tags and region of the instance match the specified conditions.
         self.resource_tags = resource_tags

     def validate(self):
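The new docstrings spell out how resourceMode interacts with regions and resourceTags: attributeMode filters by region and resource tags together, instanceMode filters by explicit instance IDs, and all collects from every instance in the account. A minimal sketch of the two filtered modes, with placeholder regions, tags, and IDs.

    from alibabacloud_sls20201230 import models as sls_models

    # attributeMode: an instance must match both the region list and the resource tags.
    attribute_policy = sls_models.UpsertCollectionPolicyRequestPolicyConfig(
        resource_mode='attributeMode',
        regions=['cn-hangzhou', 'cn-shanghai'],
        resource_tags={'env': 'prod'},
    )

    # instanceMode: collect only from the listed instance IDs.
    instance_policy = sls_models.UpsertCollectionPolicyRequestPolicyConfig(
        resource_mode='instanceMode',
        instance_ids=['i-example-1', 'i-example-2'],
    )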
@@ -21005,7 +22725,9 @@ class UpsertCollectionPolicyRequestResourceDirectory(TeaModel):
         account_group_type: str = None,
         members: List[str] = None,
     ):
+        # The mode of the resource directory. Valid values: all and custom.
         self.account_group_type = account_group_type
+        # The members. If accountGroupType is set to custom, the members are returned.
         self.members = members

     def validate(self):
@@ -21045,19 +22767,37 @@ class UpsertCollectionPolicyRequest(TeaModel):
         product_code: str = None,
         resource_directory: UpsertCollectionPolicyRequestResourceDirectory = None,
     ):
+        # The configurations of centralized storage.
         self.centralize_config = centralize_config
+        # Specifies whether to enable centralized storage. Default value: false.
         self.centralize_enabled = centralize_enabled
+        # The code of the log type.
+        #
         # This parameter is required.
         self.data_code = data_code
+        # The data configurations. The configuration is returned only for global logs. For example, if productCode is set to sls, the configuration is returned.
         self.data_config = data_config
+        # Specifies whether to enable the policy.
+        #
         # This parameter is required.
         self.enabled = enabled
+        # The configurations of the policy.
+        #
         # This parameter is required.
         self.policy_config = policy_config
+        # The name must meet the following requirements:
+        #
+        # * The name can contain only lowercase letters, digits, hyphens (-), and underscores (_).
+        # * The name must start with a letter.
+        # * The name must be 3 to 63 characters in length.
+        #
         # This parameter is required.
         self.policy_name = policy_name
+        # The code of the service.
+        #
         # This parameter is required.
         self.product_code = product_code
+        # The configurations of the resource directory. The account must have activated the resource directory and be a management account or a delegated administrator of the resource directory.
         self.resource_directory = resource_directory

     def validate(self):
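Putting the documented fields together, a hedged sketch of a complete UpsertCollectionPolicyRequest with centralized storage enabled. The policy name follows the documented naming rules; the product code, data code, destination project, and destination logstore are placeholders, and the client call that submits the request is not part of this excerpt.

    from alibabacloud_sls20201230 import models as sls_models

    request = sls_models.UpsertCollectionPolicyRequest(
        policy_name='prod_sls_policy',    # lowercase letters, digits, '-' and '_'; 3 to 63 chars
        product_code='sls',               # required: the service code
        data_code='important_log',        # required: the log type code (placeholder)
        enabled=True,                     # required: enable the policy
        policy_config=sls_models.UpsertCollectionPolicyRequestPolicyConfig(
            resource_mode='all',          # collect from all instances in the account
        ),
        centralize_enabled=True,
        centralize_config=sls_models.UpsertCollectionPolicyRequestCentralizeConfig(
            dest_region='cn-hangzhou',
            dest_project='central-project',
            dest_logstore='central-logstore',
            dest_ttl=90,                  # days; effective only for an existing logstore
        ),
    )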