alibabacloud-sls20201230 5.8.0__py3-none-any.whl → 5.10.0__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- alibabacloud_sls20201230/__init__.py +1 -1
- alibabacloud_sls20201230/client.py +2249 -199
- alibabacloud_sls20201230/models.py +2462 -301
- {alibabacloud_sls20201230-5.8.0.dist-info → alibabacloud_sls20201230-5.10.0.dist-info}/METADATA +2 -2
- alibabacloud_sls20201230-5.10.0.dist-info/RECORD +8 -0
- alibabacloud_sls20201230-5.8.0.dist-info/RECORD +0 -8
- {alibabacloud_sls20201230-5.8.0.dist-info → alibabacloud_sls20201230-5.10.0.dist-info}/LICENSE +0 -0
- {alibabacloud_sls20201230-5.8.0.dist-info → alibabacloud_sls20201230-5.10.0.dist-info}/WHEEL +0 -0
- {alibabacloud_sls20201230-5.8.0.dist-info → alibabacloud_sls20201230-5.10.0.dist-info}/top_level.txt +0 -0

@@ -877,6 +877,261 @@ class Alert(TeaModel):
         return self
 
 
+class AzureBlobIngestionConfigurationSource(TeaModel):
+    def __init__(
+        self,
+        account_key: str = None,
+        account_name: str = None,
+        compression_codec: str = None,
+        container_name: str = None,
+        encoding: str = None,
+        end_time: int = None,
+        format: Dict[str, Any] = None,
+        interval: str = None,
+        pattern: str = None,
+        prefix: str = None,
+        processor_id: str = None,
+        start_time: int = None,
+        tag_pack_id: bool = None,
+        time_field: str = None,
+        time_format: str = None,
+        time_pattern: str = None,
+        time_zone: str = None,
+    ):
+        # This parameter is required.
+        self.account_key = account_key
+        # This parameter is required.
+        self.account_name = account_name
+        # This parameter is required.
+        self.compression_codec = compression_codec
+        # This parameter is required.
+        self.container_name = container_name
+        # This parameter is required.
+        self.encoding = encoding
+        self.end_time = end_time
+        # This parameter is required.
+        self.format = format
+        # This parameter is required.
+        self.interval = interval
+        self.pattern = pattern
+        self.prefix = prefix
+        self.processor_id = processor_id
+        self.start_time = start_time
+        self.tag_pack_id = tag_pack_id
+        self.time_field = time_field
+        self.time_format = time_format
+        self.time_pattern = time_pattern
+        self.time_zone = time_zone
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.account_key is not None:
+            result['accountKey'] = self.account_key
+        if self.account_name is not None:
+            result['accountName'] = self.account_name
+        if self.compression_codec is not None:
+            result['compressionCodec'] = self.compression_codec
+        if self.container_name is not None:
+            result['containerName'] = self.container_name
+        if self.encoding is not None:
+            result['encoding'] = self.encoding
+        if self.end_time is not None:
+            result['endTime'] = self.end_time
+        if self.format is not None:
+            result['format'] = self.format
+        if self.interval is not None:
+            result['interval'] = self.interval
+        if self.pattern is not None:
+            result['pattern'] = self.pattern
+        if self.prefix is not None:
+            result['prefix'] = self.prefix
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
+        if self.start_time is not None:
+            result['startTime'] = self.start_time
+        if self.tag_pack_id is not None:
+            result['tagPackId'] = self.tag_pack_id
+        if self.time_field is not None:
+            result['timeField'] = self.time_field
+        if self.time_format is not None:
+            result['timeFormat'] = self.time_format
+        if self.time_pattern is not None:
+            result['timePattern'] = self.time_pattern
+        if self.time_zone is not None:
+            result['timeZone'] = self.time_zone
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('accountKey') is not None:
+            self.account_key = m.get('accountKey')
+        if m.get('accountName') is not None:
+            self.account_name = m.get('accountName')
+        if m.get('compressionCodec') is not None:
+            self.compression_codec = m.get('compressionCodec')
+        if m.get('containerName') is not None:
+            self.container_name = m.get('containerName')
+        if m.get('encoding') is not None:
+            self.encoding = m.get('encoding')
+        if m.get('endTime') is not None:
+            self.end_time = m.get('endTime')
+        if m.get('format') is not None:
+            self.format = m.get('format')
+        if m.get('interval') is not None:
+            self.interval = m.get('interval')
+        if m.get('pattern') is not None:
+            self.pattern = m.get('pattern')
+        if m.get('prefix') is not None:
+            self.prefix = m.get('prefix')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
+        if m.get('startTime') is not None:
+            self.start_time = m.get('startTime')
+        if m.get('tagPackId') is not None:
+            self.tag_pack_id = m.get('tagPackId')
+        if m.get('timeField') is not None:
+            self.time_field = m.get('timeField')
+        if m.get('timeFormat') is not None:
+            self.time_format = m.get('timeFormat')
+        if m.get('timePattern') is not None:
+            self.time_pattern = m.get('timePattern')
+        if m.get('timeZone') is not None:
+            self.time_zone = m.get('timeZone')
+        return self
+
+
+class AzureBlobIngestionConfiguration(TeaModel):
+    def __init__(
+        self,
+        logstore: str = None,
+        source: AzureBlobIngestionConfigurationSource = None,
+    ):
+        self.logstore = logstore
+        self.source = source
+
+    def validate(self):
+        if self.source:
+            self.source.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.logstore is not None:
+            result['logstore'] = self.logstore
+        if self.source is not None:
+            result['source'] = self.source.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('logstore') is not None:
+            self.logstore = m.get('logstore')
+        if m.get('source') is not None:
+            temp_model = AzureBlobIngestionConfigurationSource()
+            self.source = temp_model.from_map(m['source'])
+        return self
+
+
+class AzureBlobIngestion(TeaModel):
+    def __init__(
+        self,
+        configuration: AzureBlobIngestionConfiguration = None,
+        create_time: int = None,
+        description: str = None,
+        display_name: str = None,
+        last_modified_time: int = None,
+        name: str = None,
+        processor_id: str = None,
+        schedule: Schedule = None,
+        schedule_id: str = None,
+        status: str = None,
+    ):
+        # This parameter is required.
+        self.configuration = configuration
+        self.create_time = create_time
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        self.last_modified_time = last_modified_time
+        # This parameter is required.
+        self.name = name
+        self.processor_id = processor_id
+        # This parameter is required.
+        self.schedule = schedule
+        self.schedule_id = schedule_id
+        self.status = status
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.create_time is not None:
+            result['createTime'] = self.create_time
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.last_modified_time is not None:
+            result['lastModifiedTime'] = self.last_modified_time
+        if self.name is not None:
+            result['name'] = self.name
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        if self.schedule_id is not None:
+            result['scheduleId'] = self.schedule_id
+        if self.status is not None:
+            result['status'] = self.status
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = AzureBlobIngestionConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('createTime') is not None:
+            self.create_time = m.get('createTime')
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('lastModifiedTime') is not None:
+            self.last_modified_time = m.get('lastModifiedTime')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        if m.get('scheduleId') is not None:
+            self.schedule_id = m.get('scheduleId')
+        if m.get('status') is not None:
+            self.status = m.get('status')
+        return self
+
+
 class ConsumeProcessorConfiguration(TeaModel):
     def __init__(
         self,
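Editor's note: as a reference for the hunk above, here is a minimal sketch (not from the package documentation) of building the new Azure Blob ingestion models and round-tripping them through the `to_map()`/`from_map()` helpers shown in the diff. All field values are placeholders.

```python
# Sketch only: placeholder values; 'format' payload shape is an assumption.
from alibabacloud_sls20201230.models import (
    AzureBlobIngestionConfiguration,
    AzureBlobIngestionConfigurationSource,
)

source = AzureBlobIngestionConfigurationSource(
    account_name='example-account',      # placeholder
    account_key='***',                   # placeholder secret
    container_name='example-container',
    compression_codec='none',
    encoding='UTF-8',
    format={'type': 'line'},             # assumed format payload
    interval='5m',
)
config = AzureBlobIngestionConfiguration(logstore='example-logstore', source=source)

as_dict = config.to_map()                # camelCase keys, e.g. 'containerName'
restored = AzureBlobIngestionConfiguration().from_map(as_dict)
assert restored.source.container_name == 'example-container'
```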
@@ -1228,20 +1483,252 @@ class DeleteLogStoreLogsTask(TeaModel):
 
     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('errorCode') is not None:
-            self.error_code = m.get('errorCode')
-        if m.get('errorMessage') is not None:
-            self.error_message = m.get('errorMessage')
-        if m.get('from') is not None:
-            self.from_ = m.get('from')
-        if m.get('progress') is not None:
-            self.progress = m.get('progress')
-        if m.get('query') is not None:
-            self.query = m.get('query')
-        if m.get('taskId') is not None:
-            self.task_id = m.get('taskId')
-        if m.get('to') is not None:
-            self.to = m.get('to')
+        if m.get('errorCode') is not None:
+            self.error_code = m.get('errorCode')
+        if m.get('errorMessage') is not None:
+            self.error_message = m.get('errorMessage')
+        if m.get('from') is not None:
+            self.from_ = m.get('from')
+        if m.get('progress') is not None:
+            self.progress = m.get('progress')
+        if m.get('query') is not None:
+            self.query = m.get('query')
+        if m.get('taskId') is not None:
+            self.task_id = m.get('taskId')
+        if m.get('to') is not None:
+            self.to = m.get('to')
+        return self
+
+
+class ESIngestionConfigurationSource(TeaModel):
+    def __init__(
+        self,
+        bootstrap_servers: str = None,
+        connector_mode: str = None,
+        end_time: int = None,
+        index: str = None,
+        max_data_delay_sec: int = None,
+        min_frag_range_sec: int = None,
+        password: str = None,
+        query: str = None,
+        start_time: int = None,
+        time_field_name: str = None,
+        time_format: str = None,
+        time_zone: str = None,
+        username: str = None,
+        vpc_id: str = None,
+    ):
+        # This parameter is required.
+        self.bootstrap_servers = bootstrap_servers
+        # This parameter is required.
+        self.connector_mode = connector_mode
+        self.end_time = end_time
+        # This parameter is required.
+        self.index = index
+        # This parameter is required.
+        self.max_data_delay_sec = max_data_delay_sec
+        # This parameter is required.
+        self.min_frag_range_sec = min_frag_range_sec
+        self.password = password
+        # This parameter is required.
+        self.query = query
+        self.start_time = start_time
+        self.time_field_name = time_field_name
+        self.time_format = time_format
+        self.time_zone = time_zone
+        self.username = username
+        self.vpc_id = vpc_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.bootstrap_servers is not None:
+            result['BootstrapServers'] = self.bootstrap_servers
+        if self.connector_mode is not None:
+            result['ConnectorMode'] = self.connector_mode
+        if self.end_time is not None:
+            result['EndTime'] = self.end_time
+        if self.index is not None:
+            result['Index'] = self.index
+        if self.max_data_delay_sec is not None:
+            result['MaxDataDelaySec'] = self.max_data_delay_sec
+        if self.min_frag_range_sec is not None:
+            result['MinFragRangeSec'] = self.min_frag_range_sec
+        if self.password is not None:
+            result['Password'] = self.password
+        if self.query is not None:
+            result['Query'] = self.query
+        if self.start_time is not None:
+            result['StartTime'] = self.start_time
+        if self.time_field_name is not None:
+            result['TimeFieldName'] = self.time_field_name
+        if self.time_format is not None:
+            result['TimeFormat'] = self.time_format
+        if self.time_zone is not None:
+            result['TimeZone'] = self.time_zone
+        if self.username is not None:
+            result['Username'] = self.username
+        if self.vpc_id is not None:
+            result['VpcId'] = self.vpc_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('BootstrapServers') is not None:
+            self.bootstrap_servers = m.get('BootstrapServers')
+        if m.get('ConnectorMode') is not None:
+            self.connector_mode = m.get('ConnectorMode')
+        if m.get('EndTime') is not None:
+            self.end_time = m.get('EndTime')
+        if m.get('Index') is not None:
+            self.index = m.get('Index')
+        if m.get('MaxDataDelaySec') is not None:
+            self.max_data_delay_sec = m.get('MaxDataDelaySec')
+        if m.get('MinFragRangeSec') is not None:
+            self.min_frag_range_sec = m.get('MinFragRangeSec')
+        if m.get('Password') is not None:
+            self.password = m.get('Password')
+        if m.get('Query') is not None:
+            self.query = m.get('Query')
+        if m.get('StartTime') is not None:
+            self.start_time = m.get('StartTime')
+        if m.get('TimeFieldName') is not None:
+            self.time_field_name = m.get('TimeFieldName')
+        if m.get('TimeFormat') is not None:
+            self.time_format = m.get('TimeFormat')
+        if m.get('TimeZone') is not None:
+            self.time_zone = m.get('TimeZone')
+        if m.get('Username') is not None:
+            self.username = m.get('Username')
+        if m.get('VpcId') is not None:
+            self.vpc_id = m.get('VpcId')
+        return self
+
+
+class ESIngestionConfiguration(TeaModel):
+    def __init__(
+        self,
+        logstore: str = None,
+        source: ESIngestionConfigurationSource = None,
+    ):
+        # This parameter is required.
+        self.logstore = logstore
+        # This parameter is required.
+        self.source = source
+
+    def validate(self):
+        if self.source:
+            self.source.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.logstore is not None:
+            result['logstore'] = self.logstore
+        if self.source is not None:
+            result['source'] = self.source.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('logstore') is not None:
+            self.logstore = m.get('logstore')
+        if m.get('source') is not None:
+            temp_model = ESIngestionConfigurationSource()
+            self.source = temp_model.from_map(m['source'])
+        return self
+
+
+class ESIngestion(TeaModel):
+    def __init__(
+        self,
+        configuration: ESIngestionConfiguration = None,
+        create_time: int = None,
+        description: str = None,
+        display_name: str = None,
+        last_modified_time: int = None,
+        name: str = None,
+        schedule: Schedule = None,
+        schedule_id: str = None,
+        status: str = None,
+    ):
+        # This parameter is required.
+        self.configuration = configuration
+        self.create_time = create_time
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        self.last_modified_time = last_modified_time
+        # This parameter is required.
+        self.name = name
+        # This parameter is required.
+        self.schedule = schedule
+        self.schedule_id = schedule_id
+        self.status = status
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.create_time is not None:
+            result['createTime'] = self.create_time
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.last_modified_time is not None:
+            result['lastModifiedTime'] = self.last_modified_time
+        if self.name is not None:
+            result['name'] = self.name
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        if self.schedule_id is not None:
+            result['scheduleId'] = self.schedule_id
+        if self.status is not None:
+            result['status'] = self.status
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = ESIngestionConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('createTime') is not None:
+            self.create_time = m.get('createTime')
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('lastModifiedTime') is not None:
+            self.last_modified_time = m.get('lastModifiedTime')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        if m.get('scheduleId') is not None:
+            self.schedule_id = m.get('scheduleId')
+        if m.get('status') is not None:
+            self.status = m.get('status')
         return self
 
 
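Editor's note: one detail worth flagging in the hunk above is that the Elasticsearch source serializes with PascalCase keys ('BootstrapServers', 'Index', ...), unlike the camelCase keys used by the Azure Blob source. A minimal sketch with placeholder values:

```python
# Sketch only: 'connector_mode' value is an assumption, not taken from the docs.
from alibabacloud_sls20201230.models import ESIngestionConfigurationSource

es_source = ESIngestionConfigurationSource(
    bootstrap_servers='host1:9200,host2:9200',  # placeholder endpoints
    connector_mode='pull',                      # assumed mode value
    index='app-logs-*',
    query='{"match_all": {}}',
    max_data_delay_sec=300,
    min_frag_range_sec=60,
)
print(es_source.to_map())  # {'BootstrapServers': ..., 'ConnectorMode': ..., 'Index': ...}
```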
@@ -1854,6 +2341,262 @@ class IngestProcessor(TeaModel):
         return self
 
 
+class KafkaIngestionConfigurationSource(TeaModel):
+    def __init__(
+        self,
+        bootstrap_servers: str = None,
+        communication: str = None,
+        consumer_group: str = None,
+        default_time_source: str = None,
+        enable_sls_context: bool = None,
+        encoding: str = None,
+        from_position: str = None,
+        name_resolutions: str = None,
+        parse_array: bool = None,
+        processor_id: str = None,
+        time_field: str = None,
+        time_format: str = None,
+        time_pattern: str = None,
+        time_zone: str = None,
+        topics: str = None,
+        value_type: str = None,
+        vpc_id: str = None,
+    ):
+        # This parameter is required.
+        self.bootstrap_servers = bootstrap_servers
+        self.communication = communication
+        self.consumer_group = consumer_group
+        self.default_time_source = default_time_source
+        self.enable_sls_context = enable_sls_context
+        # This parameter is required.
+        self.encoding = encoding
+        # This parameter is required.
+        self.from_position = from_position
+        self.name_resolutions = name_resolutions
+        # This parameter is required.
+        self.parse_array = parse_array
+        self.processor_id = processor_id
+        self.time_field = time_field
+        self.time_format = time_format
+        self.time_pattern = time_pattern
+        self.time_zone = time_zone
+        # This parameter is required.
+        self.topics = topics
+        # This parameter is required.
+        self.value_type = value_type
+        self.vpc_id = vpc_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.bootstrap_servers is not None:
+            result['bootstrapServers'] = self.bootstrap_servers
+        if self.communication is not None:
+            result['communication'] = self.communication
+        if self.consumer_group is not None:
+            result['consumerGroup'] = self.consumer_group
+        if self.default_time_source is not None:
+            result['defaultTimeSource'] = self.default_time_source
+        if self.enable_sls_context is not None:
+            result['enableSlsContext'] = self.enable_sls_context
+        if self.encoding is not None:
+            result['encoding'] = self.encoding
+        if self.from_position is not None:
+            result['fromPosition'] = self.from_position
+        if self.name_resolutions is not None:
+            result['nameResolutions'] = self.name_resolutions
+        if self.parse_array is not None:
+            result['parseArray'] = self.parse_array
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
+        if self.time_field is not None:
+            result['timeField'] = self.time_field
+        if self.time_format is not None:
+            result['timeFormat'] = self.time_format
+        if self.time_pattern is not None:
+            result['timePattern'] = self.time_pattern
+        if self.time_zone is not None:
+            result['timeZone'] = self.time_zone
+        if self.topics is not None:
+            result['topics'] = self.topics
+        if self.value_type is not None:
+            result['valueType'] = self.value_type
+        if self.vpc_id is not None:
+            result['vpcId'] = self.vpc_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('bootstrapServers') is not None:
+            self.bootstrap_servers = m.get('bootstrapServers')
+        if m.get('communication') is not None:
+            self.communication = m.get('communication')
+        if m.get('consumerGroup') is not None:
+            self.consumer_group = m.get('consumerGroup')
+        if m.get('defaultTimeSource') is not None:
+            self.default_time_source = m.get('defaultTimeSource')
+        if m.get('enableSlsContext') is not None:
+            self.enable_sls_context = m.get('enableSlsContext')
+        if m.get('encoding') is not None:
+            self.encoding = m.get('encoding')
+        if m.get('fromPosition') is not None:
+            self.from_position = m.get('fromPosition')
+        if m.get('nameResolutions') is not None:
+            self.name_resolutions = m.get('nameResolutions')
+        if m.get('parseArray') is not None:
+            self.parse_array = m.get('parseArray')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
+        if m.get('timeField') is not None:
+            self.time_field = m.get('timeField')
+        if m.get('timeFormat') is not None:
+            self.time_format = m.get('timeFormat')
+        if m.get('timePattern') is not None:
+            self.time_pattern = m.get('timePattern')
+        if m.get('timeZone') is not None:
+            self.time_zone = m.get('timeZone')
+        if m.get('topics') is not None:
+            self.topics = m.get('topics')
+        if m.get('valueType') is not None:
+            self.value_type = m.get('valueType')
+        if m.get('vpcId') is not None:
+            self.vpc_id = m.get('vpcId')
+        return self
+
+
+class KafkaIngestionConfiguration(TeaModel):
+    def __init__(
+        self,
+        logstore: str = None,
+        source: KafkaIngestionConfigurationSource = None,
+    ):
+        # This parameter is required.
+        self.logstore = logstore
+        # This parameter is required.
+        self.source = source
+
+    def validate(self):
+        if self.source:
+            self.source.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.logstore is not None:
+            result['logstore'] = self.logstore
+        if self.source is not None:
+            result['source'] = self.source.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('logstore') is not None:
+            self.logstore = m.get('logstore')
+        if m.get('source') is not None:
+            temp_model = KafkaIngestionConfigurationSource()
+            self.source = temp_model.from_map(m['source'])
+        return self
+
+
+class KafkaIngestion(TeaModel):
+    def __init__(
+        self,
+        configuration: KafkaIngestionConfiguration = None,
+        create_time: int = None,
+        description: str = None,
+        display_name: str = None,
+        last_modified_time: int = None,
+        name: str = None,
+        processor_id: str = None,
+        schedule: Schedule = None,
+        schedule_id: str = None,
+        status: str = None,
+    ):
+        # This parameter is required.
+        self.configuration = configuration
+        self.create_time = create_time
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        self.last_modified_time = last_modified_time
+        # This parameter is required.
+        self.name = name
+        self.processor_id = processor_id
+        # This parameter is required.
+        self.schedule = schedule
+        self.schedule_id = schedule_id
+        self.status = status
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.create_time is not None:
+            result['createTime'] = self.create_time
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.last_modified_time is not None:
+            result['lastModifiedTime'] = self.last_modified_time
+        if self.name is not None:
+            result['name'] = self.name
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        if self.schedule_id is not None:
+            result['scheduleId'] = self.schedule_id
+        if self.status is not None:
+            result['status'] = self.status
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = KafkaIngestionConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('createTime') is not None:
+            self.create_time = m.get('createTime')
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('lastModifiedTime') is not None:
+            self.last_modified_time = m.get('lastModifiedTime')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        if m.get('scheduleId') is not None:
+            self.schedule_id = m.get('scheduleId')
+        if m.get('status') is not None:
+            self.status = m.get('status')
+        return self
+
+
 class LogContent(TeaModel):
     def __init__(
         self,
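Editor's note: a minimal sketch of deserializing a Kafka ingestion job description with the new KafkaIngestion model from the hunk above. The payload is hand-written for illustration; the 'fromPosition' and 'valueType' values are assumptions.

```python
# Sketch only: illustrative payload, not real service output.
from alibabacloud_sls20201230.models import KafkaIngestion

payload = {
    'name': 'ingest-kafka-example',
    'displayName': 'Kafka ingest example',
    'configuration': {
        'logstore': 'example-logstore',
        'source': {
            'bootstrapServers': 'host1:9092',
            'topics': 'topic-a,topic-b',
            'encoding': 'UTF-8',
            'fromPosition': 'earliest',   # assumed position value
            'valueType': 'json',          # assumed value type
            'parseArray': False,
        },
    },
    'status': 'RUNNING',
}
job = KafkaIngestion().from_map(payload)
print(job.configuration.source.topics)  # 'topic-a,topic-b'
```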
@@ -1894,11 +2637,13 @@ class LogItem(TeaModel):
         self,
         contents: List[LogContent] = None,
         time: int = None,
+        time_ns: int = None,
     ):
         # This parameter is required.
         self.contents = contents
         # This parameter is required.
         self.time = time
+        self.time_ns = time_ns
 
     def validate(self):
         if self.contents:
@@ -1918,6 +2663,8 @@ class LogItem(TeaModel):
                 result['Contents'].append(k.to_map() if k else None)
         if self.time is not None:
             result['Time'] = self.time
+        if self.time_ns is not None:
+            result['TimeNs'] = self.time_ns
         return result
 
     def from_map(self, m: dict = None):
@@ -1929,6 +2676,8 @@ class LogItem(TeaModel):
                 self.contents.append(temp_model.from_map(k))
         if m.get('Time') is not None:
             self.time = m.get('Time')
+        if m.get('TimeNs') is not None:
+            self.time_ns = m.get('TimeNs')
         return self
 
 
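Editor's note: the three hunks above give LogItem an optional time_ns field, serialized as 'TimeNs' next to the existing 'Time' key. A minimal sketch of how a caller might populate it; the LogContent constructor fields and the exact semantics the service assigns to 'TimeNs' are not shown in this excerpt and are assumed here.

```python
# Sketch only: LogContent(key=..., value=...) and the nanosecond-remainder
# interpretation of time_ns are assumptions, not documented in this diff.
import time
from alibabacloud_sls20201230.models import LogContent, LogItem

now = time.time()
item = LogItem(
    contents=[LogContent(key='level', value='INFO')],  # assumed LogContent fields
    time=int(now),                                     # whole seconds
    time_ns=int((now % 1) * 1_000_000_000),            # assumed: nanosecond part
)
print(item.to_map())  # {'Contents': [...], 'Time': ..., 'TimeNs': ...}
```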
@@ -3318,6 +4067,7 @@ class OSSIngestionConfigurationSource(TeaModel):
         interval: str = None,
         pattern: str = None,
         prefix: str = None,
+        processor_id: str = None,
         restore_object_enabled: bool = None,
         role_arn: str = None,
         start_time: int = None,
@@ -3343,6 +4093,7 @@ class OSSIngestionConfigurationSource(TeaModel):
         self.interval = interval
         self.pattern = pattern
         self.prefix = prefix
+        self.processor_id = processor_id
         self.restore_object_enabled = restore_object_enabled
         self.role_arn = role_arn
         self.start_time = start_time
@@ -3381,6 +4132,8 @@ class OSSIngestionConfigurationSource(TeaModel):
             result['pattern'] = self.pattern
         if self.prefix is not None:
             result['prefix'] = self.prefix
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.restore_object_enabled is not None:
             result['restoreObjectEnabled'] = self.restore_object_enabled
         if self.role_arn is not None:
@@ -3421,6 +4174,8 @@ class OSSIngestionConfigurationSource(TeaModel):
             self.pattern = m.get('pattern')
         if m.get('prefix') is not None:
             self.prefix = m.get('prefix')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('restoreObjectEnabled') is not None:
             self.restore_object_enabled = m.get('restoreObjectEnabled')
         if m.get('roleARN') is not None:
@@ -3488,6 +4243,7 @@ class OSSIngestion(TeaModel):
         display_name: str = None,
         last_modified_time: int = None,
         name: str = None,
+        processor_id: str = None,
         schedule: Schedule = None,
         schedule_id: str = None,
         status: str = None,
@@ -3501,6 +4257,7 @@ class OSSIngestion(TeaModel):
         self.last_modified_time = last_modified_time
         # This parameter is required.
         self.name = name
+        self.processor_id = processor_id
         # This parameter is required.
         self.schedule = schedule
         self.schedule_id = schedule_id
@@ -3530,6 +4287,8 @@ class OSSIngestion(TeaModel):
             result['lastModifiedTime'] = self.last_modified_time
         if self.name is not None:
             result['name'] = self.name
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.schedule is not None:
             result['schedule'] = self.schedule.to_map()
         if self.schedule_id is not None:
@@ -3553,6 +4312,8 @@ class OSSIngestion(TeaModel):
             self.last_modified_time = m.get('lastModifiedTime')
         if m.get('name') is not None:
             self.name = m.get('name')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('schedule') is not None:
             temp_model = Schedule()
             self.schedule = temp_model.from_map(m['schedule'])
@@ -3666,10 +4427,12 @@ class S3IngestionConfigurationSource(TeaModel):
         compression_codec: str = None,
         encoding: str = None,
         end_time: int = None,
+        endpoint: str = None,
         format: Dict[str, Any] = None,
         interval: str = None,
         pattern: str = None,
         prefix: str = None,
+        processor_id: str = None,
         start_time: int = None,
         tag_pack_id: bool = None,
         time_field: str = None,
@@ -3693,12 +4456,14 @@ class S3IngestionConfigurationSource(TeaModel):
         # This parameter is required.
         self.encoding = encoding
         self.end_time = end_time
+        self.endpoint = endpoint
         # This parameter is required.
         self.format = format
         # This parameter is required.
         self.interval = interval
         self.pattern = pattern
         self.prefix = prefix
+        self.processor_id = processor_id
         self.start_time = start_time
         self.tag_pack_id = tag_pack_id
         self.time_field = time_field
@@ -3734,6 +4499,8 @@ class S3IngestionConfigurationSource(TeaModel):
             result['encoding'] = self.encoding
         if self.end_time is not None:
             result['endTime'] = self.end_time
+        if self.endpoint is not None:
+            result['endpoint'] = self.endpoint
         if self.format is not None:
             result['format'] = self.format
         if self.interval is not None:
@@ -3742,6 +4509,8 @@ class S3IngestionConfigurationSource(TeaModel):
             result['pattern'] = self.pattern
         if self.prefix is not None:
             result['prefix'] = self.prefix
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.start_time is not None:
             result['startTime'] = self.start_time
         if self.tag_pack_id is not None:
@@ -3778,6 +4547,8 @@ class S3IngestionConfigurationSource(TeaModel):
             self.encoding = m.get('encoding')
         if m.get('endTime') is not None:
             self.end_time = m.get('endTime')
+        if m.get('endpoint') is not None:
+            self.endpoint = m.get('endpoint')
         if m.get('format') is not None:
             self.format = m.get('format')
         if m.get('interval') is not None:
@@ -3786,6 +4557,8 @@ class S3IngestionConfigurationSource(TeaModel):
             self.pattern = m.get('pattern')
         if m.get('prefix') is not None:
             self.prefix = m.get('prefix')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('startTime') is not None:
             self.start_time = m.get('startTime')
         if m.get('tagPackId') is not None:
@@ -3812,6 +4585,7 @@ class S3Ingestion(TeaModel):
         display_name: str = None,
         last_modified_time: int = None,
         name: str = None,
+        processor_id: str = None,
         schedule: Schedule = None,
         schedule_id: str = None,
         status: str = None,
@@ -3825,6 +4599,7 @@ class S3Ingestion(TeaModel):
         self.last_modified_time = last_modified_time
         # This parameter is required.
         self.name = name
+        self.processor_id = processor_id
         # This parameter is required.
         self.schedule = schedule
         self.schedule_id = schedule_id
@@ -3854,6 +4629,8 @@ class S3Ingestion(TeaModel):
             result['lastModifiedTime'] = self.last_modified_time
         if self.name is not None:
             result['name'] = self.name
+        if self.processor_id is not None:
+            result['processorId'] = self.processor_id
         if self.schedule is not None:
             result['schedule'] = self.schedule.to_map()
         if self.schedule_id is not None:
@@ -3877,6 +4654,8 @@ class S3Ingestion(TeaModel):
             self.last_modified_time = m.get('lastModifiedTime')
         if m.get('name') is not None:
             self.name = m.get('name')
+        if m.get('processorId') is not None:
+            self.processor_id = m.get('processorId')
         if m.get('schedule') is not None:
             temp_model = Schedule()
             self.schedule = temp_model.from_map(m['schedule'])
@@ -4201,6 +4980,121 @@ class ScheduledSQL(TeaModel):
         return self
 
 
+class ShardingPolicyShardGroup(TeaModel):
+    def __init__(
+        self,
+        group_count: int = None,
+        keys: List[str] = None,
+    ):
+        # This parameter is required.
+        self.group_count = group_count
+        # This parameter is required.
+        self.keys = keys
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.group_count is not None:
+            result['groupCount'] = self.group_count
+        if self.keys is not None:
+            result['keys'] = self.keys
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('groupCount') is not None:
+            self.group_count = m.get('groupCount')
+        if m.get('keys') is not None:
+            self.keys = m.get('keys')
+        return self
+
+
+class ShardingPolicyShardHash(TeaModel):
+    def __init__(
+        self,
+        keys: List[str] = None,
+        max_hash_count: int = None,
+    ):
+        # This parameter is required.
+        self.keys = keys
+        # This parameter is required.
+        self.max_hash_count = max_hash_count
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.keys is not None:
+            result['keys'] = self.keys
+        if self.max_hash_count is not None:
+            result['maxHashCount'] = self.max_hash_count
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('keys') is not None:
+            self.keys = m.get('keys')
+        if m.get('maxHashCount') is not None:
+            self.max_hash_count = m.get('maxHashCount')
+        return self
+
+
+class ShardingPolicy(TeaModel):
+    def __init__(
+        self,
+        query_active_time: int = None,
+        shard_group: ShardingPolicyShardGroup = None,
+        shard_hash: ShardingPolicyShardHash = None,
+    ):
+        self.query_active_time = query_active_time
+        self.shard_group = shard_group
+        # This parameter is required.
+        self.shard_hash = shard_hash
+
+    def validate(self):
+        if self.shard_group:
+            self.shard_group.validate()
+        if self.shard_hash:
+            self.shard_hash.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.query_active_time is not None:
+            result['queryActiveTime'] = self.query_active_time
+        if self.shard_group is not None:
+            result['shardGroup'] = self.shard_group.to_map()
+        if self.shard_hash is not None:
+            result['shardHash'] = self.shard_hash.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('queryActiveTime') is not None:
+            self.query_active_time = m.get('queryActiveTime')
+        if m.get('shardGroup') is not None:
+            temp_model = ShardingPolicyShardGroup()
+            self.shard_group = temp_model.from_map(m['shardGroup'])
+        if m.get('shardHash') is not None:
+            temp_model = ShardingPolicyShardHash()
+            self.shard_hash = temp_model.from_map(m['shardHash'])
+        return self
+
+
 class StoreViewStore(TeaModel):
     def __init__(
         self,
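Editor's note: a minimal sketch of the new sharding-policy models from the hunk above. In the generated code, shard_hash is the only field marked required; keys and counts below are placeholders. Further down in this diff, Logstore gains a sharding_policy field that accepts this model and serializes it under 'shardingPolicy'.

```python
# Sketch only: placeholder keys and counts.
from alibabacloud_sls20201230.models import (
    ShardingPolicy,
    ShardingPolicyShardGroup,
    ShardingPolicyShardHash,
)

policy = ShardingPolicy(
    shard_hash=ShardingPolicyShardHash(keys=['uid'], max_hash_count=8),
    shard_group=ShardingPolicyShardGroup(group_count=4, keys=['region']),
    query_active_time=300,
)
policy.validate()
print(policy.to_map())
# {'queryActiveTime': 300, 'shardGroup': {...}, 'shardHash': {'keys': ['uid'], 'maxHashCount': 8}}
```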
@@ -4556,6 +5450,7 @@ class Index(TeaModel):
         log_reduce_black_list: List[str] = None,
         log_reduce_white_list: List[str] = None,
         max_text_len: int = None,
+        scan_index: bool = None,
     ):
         self.keys = keys
         self.line = line
@@ -4563,6 +5458,7 @@ class Index(TeaModel):
         self.log_reduce_black_list = log_reduce_black_list
         self.log_reduce_white_list = log_reduce_white_list
         self.max_text_len = max_text_len
+        self.scan_index = scan_index
 
     def validate(self):
         if self.keys:
@@ -4592,6 +5488,8 @@ class Index(TeaModel):
             result['log_reduce_white_list'] = self.log_reduce_white_list
         if self.max_text_len is not None:
             result['max_text_len'] = self.max_text_len
+        if self.scan_index is not None:
+            result['scan_index'] = self.scan_index
         return result
 
     def from_map(self, m: dict = None):
@@ -4612,6 +5510,8 @@ class Index(TeaModel):
             self.log_reduce_white_list = m.get('log_reduce_white_list')
         if m.get('max_text_len') is not None:
             self.max_text_len = m.get('max_text_len')
+        if m.get('scan_index') is not None:
+            self.scan_index = m.get('scan_index')
         return self
 
 
@@ -4710,6 +5610,7 @@ class Logstore(TeaModel):
         processor_id: str = None,
         product_type: str = None,
         shard_count: int = None,
+        sharding_policy: ShardingPolicy = None,
         telemetry_type: str = None,
         ttl: int = None,
     ):
@@ -4729,6 +5630,7 @@ class Logstore(TeaModel):
         self.product_type = product_type
         # This parameter is required.
         self.shard_count = shard_count
+        self.sharding_policy = sharding_policy
         self.telemetry_type = telemetry_type
         # This parameter is required.
         self.ttl = ttl
@@ -4736,6 +5638,8 @@ class Logstore(TeaModel):
     def validate(self):
         if self.encrypt_conf:
             self.encrypt_conf.validate()
+        if self.sharding_policy:
+            self.sharding_policy.validate()
 
     def to_map(self):
         _map = super().to_map()
@@ -4771,6 +5675,8 @@ class Logstore(TeaModel):
             result['productType'] = self.product_type
         if self.shard_count is not None:
             result['shardCount'] = self.shard_count
+        if self.sharding_policy is not None:
+            result['shardingPolicy'] = self.sharding_policy.to_map()
         if self.telemetry_type is not None:
             result['telemetryType'] = self.telemetry_type
         if self.ttl is not None:
@@ -4808,6 +5714,9 @@ class Logstore(TeaModel):
             self.product_type = m.get('productType')
         if m.get('shardCount') is not None:
             self.shard_count = m.get('shardCount')
+        if m.get('shardingPolicy') is not None:
+            temp_model = ShardingPolicy()
+            self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
         if m.get('telemetryType') is not None:
             self.telemetry_type = m.get('telemetryType')
         if m.get('ttl') is not None:
@@ -4962,7 +5871,6 @@ class Project(TeaModel):
         data_redundancy_type: str = None,
         description: str = None,
         last_modify_time: str = None,
-        location: str = None,
         owner: str = None,
         project_name: str = None,
         quota: Dict[str, Any] = None,
@@ -4976,7 +5884,6 @@ class Project(TeaModel):
         # This parameter is required.
         self.description = description
         self.last_modify_time = last_modify_time
-        self.location = location
         self.owner = owner
         # This parameter is required.
         self.project_name = project_name
@@ -5003,8 +5910,6 @@ class Project(TeaModel):
             result['description'] = self.description
         if self.last_modify_time is not None:
             result['lastModifyTime'] = self.last_modify_time
-        if self.location is not None:
-            result['location'] = self.location
         if self.owner is not None:
             result['owner'] = self.owner
         if self.project_name is not None:
@@ -5031,8 +5936,6 @@ class Project(TeaModel):
             self.description = m.get('description')
         if m.get('lastModifyTime') is not None:
             self.last_modify_time = m.get('lastModifyTime')
-        if m.get('location') is not None:
-            self.location = m.get('location')
         if m.get('owner') is not None:
             self.owner = m.get('owner')
         if m.get('projectName') is not None:
@@ -5817,6 +6720,98 @@ class CreateAnnotationLabelResponse(TeaModel):
         return self
 
 
+class CreateAzureBlobIngestionRequest(TeaModel):
+    def __init__(
+        self,
+        configuration: AzureBlobIngestionConfiguration = None,
+        description: str = None,
+        display_name: str = None,
+        name: str = None,
+        schedule: Schedule = None,
+    ):
+        # This parameter is required.
+        self.configuration = configuration
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        # This parameter is required.
+        self.name = name
+        self.schedule = schedule
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.name is not None:
+            result['name'] = self.name
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = AzureBlobIngestionConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        return self
+
+
+class CreateAzureBlobIngestionResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class CreateConfigRequest(TeaModel):
     def __init__(
         self,
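Editor's note: a minimal sketch of assembling the new CreateAzureBlobIngestionRequest from the hunk above. The Schedule model's fields are not shown in this excerpt, and the client method that consumes the request lives in client.py (not reproduced here), so only request construction is illustrated; all values are placeholders.

```python
# Sketch only: placeholder values; 'format' payload shape is an assumption.
from alibabacloud_sls20201230.models import (
    AzureBlobIngestionConfiguration,
    AzureBlobIngestionConfigurationSource,
    CreateAzureBlobIngestionRequest,
)

request = CreateAzureBlobIngestionRequest(
    name='ingest-azure-blob-example',
    display_name='Azure Blob ingest example',
    description='created from the 5.10.0 models',
    configuration=AzureBlobIngestionConfiguration(
        logstore='example-logstore',
        source=AzureBlobIngestionConfigurationSource(
            account_name='example-account',
            account_key='***',
            container_name='example-container',
            compression_codec='none',
            encoding='UTF-8',
            format={'type': 'line'},   # assumed format payload
            interval='5m',
        ),
    ),
)
request.validate()
body = request.to_map()  # ready to hand to the corresponding client call
```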
@@ -6227,43 +7222,133 @@ class CreateDownloadJobRequestConfiguration(TeaModel):
|
|
|
6227
7222
|
|
|
6228
7223
|
def from_map(self, m: dict = None):
|
|
6229
7224
|
m = m or dict()
|
|
6230
|
-
if m.get('allowInComplete') is not None:
|
|
6231
|
-
self.allow_in_complete = m.get('allowInComplete')
|
|
6232
|
-
if m.get('fromTime') is not None:
|
|
6233
|
-
self.from_time = m.get('fromTime')
|
|
6234
|
-
if m.get('logstore') is not None:
|
|
6235
|
-
self.logstore = m.get('logstore')
|
|
6236
|
-
if m.get('powerSql') is not None:
|
|
6237
|
-
self.power_sql = m.get('powerSql')
|
|
6238
|
-
if m.get('query') is not None:
|
|
6239
|
-
self.query = m.get('query')
|
|
6240
|
-
if m.get('sink') is not None:
|
|
6241
|
-
temp_model = CreateDownloadJobRequestConfigurationSink()
|
|
6242
|
-
self.sink = temp_model.from_map(m['sink'])
|
|
6243
|
-
if m.get('toTime') is not None:
|
|
6244
|
-
self.to_time = m.get('toTime')
|
|
7225
|
+
if m.get('allowInComplete') is not None:
|
|
7226
|
+
self.allow_in_complete = m.get('allowInComplete')
|
|
7227
|
+
if m.get('fromTime') is not None:
|
|
7228
|
+
self.from_time = m.get('fromTime')
|
|
7229
|
+
if m.get('logstore') is not None:
|
|
7230
|
+
self.logstore = m.get('logstore')
|
|
7231
|
+
if m.get('powerSql') is not None:
|
|
7232
|
+
self.power_sql = m.get('powerSql')
|
|
7233
|
+
if m.get('query') is not None:
|
|
7234
|
+
self.query = m.get('query')
|
|
7235
|
+
if m.get('sink') is not None:
|
|
7236
|
+
temp_model = CreateDownloadJobRequestConfigurationSink()
|
|
7237
|
+
self.sink = temp_model.from_map(m['sink'])
|
|
7238
|
+
if m.get('toTime') is not None:
|
|
7239
|
+
self.to_time = m.get('toTime')
|
|
7240
|
+
return self
|
|
7241
|
+
|
|
7242
|
+
|
|
7243
|
+
class CreateDownloadJobRequest(TeaModel):
|
|
7244
|
+
def __init__(
|
|
7245
|
+
self,
|
|
7246
|
+
configuration: CreateDownloadJobRequestConfiguration = None,
|
|
7247
|
+
description: str = None,
|
|
7248
|
+
display_name: str = None,
|
|
7249
|
+
name: str = None,
|
|
7250
|
+
):
|
|
7251
|
+
# The download configuration.
|
|
7252
|
+
#
|
|
7253
|
+
# This parameter is required.
|
|
7254
|
+
self.configuration = configuration
|
|
7255
|
+
# The description of the job.
|
|
7256
|
+
self.description = description
|
|
7257
|
+
# The display name of the job.
|
|
7258
|
+
#
|
|
7259
|
+
# This parameter is required.
|
|
7260
|
+
self.display_name = display_name
|
|
7261
|
+
# The resource attribute field that represents the resource name.
|
|
7262
|
+
#
|
|
7263
|
+
# This parameter is required.
|
|
7264
|
+
self.name = name
|
|
7265
|
+
|
|
7266
|
+
def validate(self):
|
|
7267
|
+
if self.configuration:
|
|
7268
|
+
self.configuration.validate()
|
|
7269
|
+
|
|
7270
|
+
def to_map(self):
|
|
7271
|
+
_map = super().to_map()
|
|
7272
|
+
if _map is not None:
|
|
7273
|
+
return _map
|
|
7274
|
+
|
|
7275
|
+
result = dict()
|
|
7276
|
+
if self.configuration is not None:
|
|
7277
|
+
result['configuration'] = self.configuration.to_map()
|
|
7278
|
+
if self.description is not None:
|
|
7279
|
+
result['description'] = self.description
|
|
7280
|
+
if self.display_name is not None:
|
|
7281
|
+
result['displayName'] = self.display_name
|
|
7282
|
+
if self.name is not None:
|
|
7283
|
+
result['name'] = self.name
|
|
7284
|
+
return result
|
|
7285
|
+
|
|
7286
|
+
def from_map(self, m: dict = None):
|
|
7287
|
+
m = m or dict()
|
|
7288
|
+
if m.get('configuration') is not None:
|
|
7289
|
+
temp_model = CreateDownloadJobRequestConfiguration()
|
|
7290
|
+
self.configuration = temp_model.from_map(m['configuration'])
|
|
7291
|
+
if m.get('description') is not None:
|
|
7292
|
+
self.description = m.get('description')
|
|
7293
|
+
if m.get('displayName') is not None:
|
|
7294
|
+
self.display_name = m.get('displayName')
|
|
7295
|
+
if m.get('name') is not None:
|
|
7296
|
+
self.name = m.get('name')
|
|
7297
|
+
return self
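A hedged sketch of how the new download-job request could be hydrated from a plain dict, using only the keys that appear in the from_map() code above; the timestamp values and the omitted sink block are illustrative assumptions.

# Sketch: deserialize CreateDownloadJobRequest from raw data using the documented keys.
from alibabacloud_sls20201230 import models as sls_models

raw = {
    'configuration': {
        'logstore': 'my-logstore',
        'query': '* | select count(*) as cnt',
        'fromTime': 1700000000,      # assumed to be a Unix timestamp
        'toTime': 1700003600,
        'allowInComplete': False,
        'powerSql': False,
        # 'sink': {...}              # CreateDownloadJobRequestConfigurationSink fields are not shown here
    },
    'displayName': 'export-demo',    # required
    'name': 'download-job-demo',     # required
}
request = sls_models.CreateDownloadJobRequest().from_map(raw)
request.validate()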
|
|
7298
|
+
|
|
7299
|
+
|
|
7300
|
+
class CreateDownloadJobResponse(TeaModel):
|
|
7301
|
+
def __init__(
|
|
7302
|
+
self,
|
|
7303
|
+
headers: Dict[str, str] = None,
|
|
7304
|
+
status_code: int = None,
|
|
7305
|
+
):
|
|
7306
|
+
self.headers = headers
|
|
7307
|
+
self.status_code = status_code
|
|
7308
|
+
|
|
7309
|
+
def validate(self):
|
|
7310
|
+
pass
|
|
7311
|
+
|
|
7312
|
+
def to_map(self):
|
|
7313
|
+
_map = super().to_map()
|
|
7314
|
+
if _map is not None:
|
|
7315
|
+
return _map
|
|
7316
|
+
|
|
7317
|
+
result = dict()
|
|
7318
|
+
if self.headers is not None:
|
|
7319
|
+
result['headers'] = self.headers
|
|
7320
|
+
if self.status_code is not None:
|
|
7321
|
+
result['statusCode'] = self.status_code
|
|
7322
|
+
return result
|
|
7323
|
+
|
|
7324
|
+
def from_map(self, m: dict = None):
|
|
7325
|
+
m = m or dict()
|
|
7326
|
+
if m.get('headers') is not None:
|
|
7327
|
+
self.headers = m.get('headers')
|
|
7328
|
+
if m.get('statusCode') is not None:
|
|
7329
|
+
self.status_code = m.get('statusCode')
|
|
6245
7330
|
return self
|
|
6246
7331
|
|
|
6247
7332
|
|
|
6248
|
-
class
|
|
7333
|
+
class CreateETLRequest(TeaModel):
|
|
6249
7334
|
def __init__(
|
|
6250
7335
|
self,
|
|
6251
|
-
configuration:
|
|
7336
|
+
configuration: ETLConfiguration = None,
|
|
6252
7337
|
description: str = None,
|
|
6253
7338
|
display_name: str = None,
|
|
6254
7339
|
name: str = None,
|
|
6255
7340
|
):
|
|
6256
|
-
#
|
|
7341
|
+
# The detailed configuration of the job.
|
|
6257
7342
|
#
|
|
6258
7343
|
# This parameter is required.
|
|
6259
7344
|
self.configuration = configuration
|
|
6260
|
-
#
|
|
7345
|
+
# The description of the job.
|
|
6261
7346
|
self.description = description
|
|
6262
|
-
#
|
|
7347
|
+
# The display name of the job.
|
|
6263
7348
|
#
|
|
6264
7349
|
# This parameter is required.
|
|
6265
7350
|
self.display_name = display_name
|
|
6266
|
-
#
|
|
7351
|
+
# The name of the job (unique within a project).
|
|
6267
7352
|
#
|
|
6268
7353
|
# This parameter is required.
|
|
6269
7354
|
self.name = name
|
|
@@ -6291,7 +7376,7 @@ class CreateDownloadJobRequest(TeaModel):
|
|
|
6291
7376
|
def from_map(self, m: dict = None):
|
|
6292
7377
|
m = m or dict()
|
|
6293
7378
|
if m.get('configuration') is not None:
|
|
6294
|
-
temp_model =
|
|
7379
|
+
temp_model = ETLConfiguration()
|
|
6295
7380
|
self.configuration = temp_model.from_map(m['configuration'])
|
|
6296
7381
|
if m.get('description') is not None:
|
|
6297
7382
|
self.description = m.get('description')
|
|
@@ -6302,7 +7387,7 @@ class CreateDownloadJobRequest(TeaModel):
|
|
|
6302
7387
|
return self
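A small sketch of the now fully typed ETL request (assumed usage; ETLConfiguration's own fields are defined elsewhere in models.py, so an empty instance is used as a placeholder):

# Sketch: construct the ETL job request with its typed configuration.
from alibabacloud_sls20201230 import models as sls_models

etl_request = sls_models.CreateETLRequest(
    configuration=sls_models.ETLConfiguration(),   # required; placeholder instance
    display_name='etl-demo',                       # required
    name='etl-demo',                               # required
    description='example data transformation job',
)
etl_request.validate()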
|
|
6303
7388
|
|
|
6304
7389
|
|
|
6305
|
-
class
|
|
7390
|
+
class CreateETLResponse(TeaModel):
|
|
6306
7391
|
def __init__(
|
|
6307
7392
|
self,
|
|
6308
7393
|
headers: Dict[str, str] = None,
|
|
@@ -6335,32 +7420,29 @@ class CreateDownloadJobResponse(TeaModel):
|
|
|
6335
7420
|
return self
|
|
6336
7421
|
|
|
6337
7422
|
|
|
6338
|
-
class
|
|
7423
|
+
class CreateElasticsearchIngestionRequest(TeaModel):
|
|
6339
7424
|
def __init__(
|
|
6340
7425
|
self,
|
|
6341
|
-
configuration:
|
|
7426
|
+
configuration: ESIngestionConfiguration = None,
|
|
6342
7427
|
description: str = None,
|
|
6343
7428
|
display_name: str = None,
|
|
6344
7429
|
name: str = None,
|
|
7430
|
+
schedule: Schedule = None,
|
|
6345
7431
|
):
|
|
6346
|
-
# The detailed configuration of the job.
|
|
6347
|
-
#
|
|
6348
7432
|
# This parameter is required.
|
|
6349
7433
|
self.configuration = configuration
|
|
6350
|
-
# The description of the job.
|
|
6351
7434
|
self.description = description
|
|
6352
|
-
# The display name of the job.
|
|
6353
|
-
#
|
|
6354
7435
|
# This parameter is required.
|
|
6355
7436
|
self.display_name = display_name
|
|
6356
|
-
# The name of the job (unique within a project).
|
|
6357
|
-
#
|
|
6358
7437
|
# This parameter is required.
|
|
6359
7438
|
self.name = name
|
|
7439
|
+
self.schedule = schedule
|
|
6360
7440
|
|
|
6361
7441
|
def validate(self):
|
|
6362
7442
|
if self.configuration:
|
|
6363
7443
|
self.configuration.validate()
|
|
7444
|
+
if self.schedule:
|
|
7445
|
+
self.schedule.validate()
|
|
6364
7446
|
|
|
6365
7447
|
def to_map(self):
|
|
6366
7448
|
_map = super().to_map()
|
|
@@ -6376,12 +7458,14 @@ class CreateETLRequest(TeaModel):
|
|
|
6376
7458
|
result['displayName'] = self.display_name
|
|
6377
7459
|
if self.name is not None:
|
|
6378
7460
|
result['name'] = self.name
|
|
7461
|
+
if self.schedule is not None:
|
|
7462
|
+
result['schedule'] = self.schedule.to_map()
|
|
6379
7463
|
return result
|
|
6380
7464
|
|
|
6381
7465
|
def from_map(self, m: dict = None):
|
|
6382
7466
|
m = m or dict()
|
|
6383
7467
|
if m.get('configuration') is not None:
|
|
6384
|
-
temp_model =
|
|
7468
|
+
temp_model = ESIngestionConfiguration()
|
|
6385
7469
|
self.configuration = temp_model.from_map(m['configuration'])
|
|
6386
7470
|
if m.get('description') is not None:
|
|
6387
7471
|
self.description = m.get('description')
|
|
@@ -6389,10 +7473,13 @@ class CreateETLRequest(TeaModel):
|
|
|
6389
7473
|
self.display_name = m.get('displayName')
|
|
6390
7474
|
if m.get('name') is not None:
|
|
6391
7475
|
self.name = m.get('name')
|
|
7476
|
+
if m.get('schedule') is not None:
|
|
7477
|
+
temp_model = Schedule()
|
|
7478
|
+
self.schedule = temp_model.from_map(m['schedule'])
|
|
6392
7479
|
return self
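A sketch of the new optional schedule on the Elasticsearch ingestion request (assumed usage; ESIngestionConfiguration and Schedule fields are not visible in this diff, so empty placeholders are used):

# Sketch: the schedule is validated and serialized alongside the configuration.
from alibabacloud_sls20201230 import models as sls_models

es_request = sls_models.CreateElasticsearchIngestionRequest(
    configuration=sls_models.ESIngestionConfiguration(),
    display_name='es-ingest-demo',   # required
    name='ingest-es-demo',           # required
    schedule=sls_models.Schedule(),  # new optional field
)
es_request.validate()
payload = es_request.to_map()        # now also contains a 'schedule' entry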
|
|
6393
7480
|
|
|
6394
7481
|
|
|
6395
|
-
class
|
|
7482
|
+
class CreateElasticsearchIngestionResponse(TeaModel):
|
|
6396
7483
|
def __init__(
|
|
6397
7484
|
self,
|
|
6398
7485
|
headers: Dict[str, str] = None,
|
|
@@ -6502,6 +7589,7 @@ class CreateLogStoreRequest(TeaModel):
|
|
|
6502
7589
|
mode: str = None,
|
|
6503
7590
|
processor_id: str = None,
|
|
6504
7591
|
shard_count: int = None,
|
|
7592
|
+
sharding_policy: ShardingPolicy = None,
|
|
6505
7593
|
telemetry_type: str = None,
|
|
6506
7594
|
ttl: int = None,
|
|
6507
7595
|
):
|
|
@@ -6554,6 +7642,7 @@ class CreateLogStoreRequest(TeaModel):
|
|
|
6554
7642
|
#
|
|
6555
7643
|
# This parameter is required.
|
|
6556
7644
|
self.shard_count = shard_count
|
|
7645
|
+
self.sharding_policy = sharding_policy
|
|
6557
7646
|
# The type of the observable data. Valid values:
|
|
6558
7647
|
#
|
|
6559
7648
|
# * **None** (default): log data
|
|
@@ -6567,6 +7656,8 @@ class CreateLogStoreRequest(TeaModel):
|
|
|
6567
7656
|
def validate(self):
|
|
6568
7657
|
if self.encrypt_conf:
|
|
6569
7658
|
self.encrypt_conf.validate()
|
|
7659
|
+
if self.sharding_policy:
|
|
7660
|
+
self.sharding_policy.validate()
|
|
6570
7661
|
|
|
6571
7662
|
def to_map(self):
|
|
6572
7663
|
_map = super().to_map()
|
|
@@ -6596,6 +7687,8 @@ class CreateLogStoreRequest(TeaModel):
|
|
|
6596
7687
|
result['processorId'] = self.processor_id
|
|
6597
7688
|
if self.shard_count is not None:
|
|
6598
7689
|
result['shardCount'] = self.shard_count
|
|
7690
|
+
if self.sharding_policy is not None:
|
|
7691
|
+
result['shardingPolicy'] = self.sharding_policy.to_map()
|
|
6599
7692
|
if self.telemetry_type is not None:
|
|
6600
7693
|
result['telemetryType'] = self.telemetry_type
|
|
6601
7694
|
if self.ttl is not None:
|
|
@@ -6627,6 +7720,9 @@ class CreateLogStoreRequest(TeaModel):
|
|
|
6627
7720
|
self.processor_id = m.get('processorId')
|
|
6628
7721
|
if m.get('shardCount') is not None:
|
|
6629
7722
|
self.shard_count = m.get('shardCount')
|
|
7723
|
+
if m.get('shardingPolicy') is not None:
|
|
7724
|
+
temp_model = ShardingPolicy()
|
|
7725
|
+
self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
|
|
6630
7726
|
if m.get('telemetryType') is not None:
|
|
6631
7727
|
self.telemetry_type = m.get('telemetryType')
|
|
6632
7728
|
if m.get('ttl') is not None:
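A sketch of the shardingPolicy now accepted by CreateLogStoreRequest (assumed usage; only the added lines are shown in the hunks above, so the Logstore name field and the ShardingPolicy contents are assumptions):

# Sketch: attach the new sharding policy when creating a Logstore.
from alibabacloud_sls20201230 import models as sls_models

logstore_request = sls_models.CreateLogStoreRequest(
    logstore_name='my-logstore',                    # field name assumed; not part of the shown hunks
    shard_count=2,                                  # required
    ttl=30,
    sharding_policy=sls_models.ShardingPolicy(),    # new optional field; its fields live elsewhere in models.py
)
logstore_request.validate()                          # now also validates sharding_policy
payload = logstore_request.to_map()                  # includes a 'shardingPolicy' entry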
|
|
@@ -7173,6 +8269,7 @@ class CreateMaxComputeExportResponse(TeaModel):
|
|
|
7173
8269
|
class CreateMetricStoreRequest(TeaModel):
|
|
7174
8270
|
def __init__(
|
|
7175
8271
|
self,
|
|
8272
|
+
append_meta: bool = None,
|
|
7176
8273
|
auto_split: bool = None,
|
|
7177
8274
|
hot_ttl: int = None,
|
|
7178
8275
|
infrequent_access_ttl: int = None,
|
|
@@ -7181,8 +8278,10 @@ class CreateMetricStoreRequest(TeaModel):
|
|
|
7181
8278
|
mode: str = None,
|
|
7182
8279
|
name: str = None,
|
|
7183
8280
|
shard_count: int = None,
|
|
8281
|
+
sharding_policy: ShardingPolicy = None,
|
|
7184
8282
|
ttl: int = None,
|
|
7185
8283
|
):
|
|
8284
|
+
self.append_meta = append_meta
|
|
7186
8285
|
# Specifies whether to enable automatic sharding.
|
|
7187
8286
|
self.auto_split = auto_split
|
|
7188
8287
|
self.hot_ttl = hot_ttl
|
|
@@ -7201,13 +8300,15 @@ class CreateMetricStoreRequest(TeaModel):
|
|
|
7201
8300
|
#
|
|
7202
8301
|
# This parameter is required.
|
|
7203
8302
|
self.shard_count = shard_count
|
|
8303
|
+
self.sharding_policy = sharding_policy
|
|
7204
8304
|
# The retention period of the metric data in the Metricstore. Unit: days.
|
|
7205
8305
|
#
|
|
7206
8306
|
# This parameter is required.
|
|
7207
8307
|
self.ttl = ttl
|
|
7208
8308
|
|
|
7209
8309
|
def validate(self):
|
|
7210
|
-
|
|
8310
|
+
if self.sharding_policy:
|
|
8311
|
+
self.sharding_policy.validate()
|
|
7211
8312
|
|
|
7212
8313
|
def to_map(self):
|
|
7213
8314
|
_map = super().to_map()
|
|
@@ -7215,6 +8316,8 @@ class CreateMetricStoreRequest(TeaModel):
|
|
|
7215
8316
|
return _map
|
|
7216
8317
|
|
|
7217
8318
|
result = dict()
|
|
8319
|
+
if self.append_meta is not None:
|
|
8320
|
+
result['appendMeta'] = self.append_meta
|
|
7218
8321
|
if self.auto_split is not None:
|
|
7219
8322
|
result['autoSplit'] = self.auto_split
|
|
7220
8323
|
if self.hot_ttl is not None:
|
|
@@ -7231,12 +8334,16 @@ class CreateMetricStoreRequest(TeaModel):
|
|
|
7231
8334
|
result['name'] = self.name
|
|
7232
8335
|
if self.shard_count is not None:
|
|
7233
8336
|
result['shardCount'] = self.shard_count
|
|
8337
|
+
if self.sharding_policy is not None:
|
|
8338
|
+
result['shardingPolicy'] = self.sharding_policy.to_map()
|
|
7234
8339
|
if self.ttl is not None:
|
|
7235
8340
|
result['ttl'] = self.ttl
|
|
7236
8341
|
return result
|
|
7237
8342
|
|
|
7238
8343
|
def from_map(self, m: dict = None):
|
|
7239
8344
|
m = m or dict()
|
|
8345
|
+
if m.get('appendMeta') is not None:
|
|
8346
|
+
self.append_meta = m.get('appendMeta')
|
|
7240
8347
|
if m.get('autoSplit') is not None:
|
|
7241
8348
|
self.auto_split = m.get('autoSplit')
|
|
7242
8349
|
if m.get('hot_ttl') is not None:
|
|
@@ -7253,6 +8360,9 @@ class CreateMetricStoreRequest(TeaModel):
|
|
|
7253
8360
|
self.name = m.get('name')
|
|
7254
8361
|
if m.get('shardCount') is not None:
|
|
7255
8362
|
self.shard_count = m.get('shardCount')
|
|
8363
|
+
if m.get('shardingPolicy') is not None:
|
|
8364
|
+
temp_model = ShardingPolicy()
|
|
8365
|
+
self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
|
|
7256
8366
|
if m.get('ttl') is not None:
|
|
7257
8367
|
self.ttl = m.get('ttl')
|
|
7258
8368
|
return self
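A sketch of the two fields added to CreateMetricStoreRequest (assumed usage; values are illustrative and the ShardingPolicy placeholder is empty because its fields are not shown here):

# Sketch: enable appendMeta and pass a sharding policy when creating a Metricstore.
from alibabacloud_sls20201230 import models as sls_models

metricstore_request = sls_models.CreateMetricStoreRequest(
    name='my-metricstore',                          # required
    shard_count=2,                                  # required
    ttl=30,                                         # required
    append_meta=True,                               # new optional flag
    sharding_policy=sls_models.ShardingPolicy(),    # new optional policy
)
payload = metricstore_request.to_map()               # now carries 'appendMeta' and 'shardingPolicy'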
|
|
@@ -7677,6 +8787,97 @@ class CreateProjectResponse(TeaModel):
|
|
|
7677
8787
|
return self
|
|
7678
8788
|
|
|
7679
8789
|
|
|
8790
|
+
class CreateS3IngestionRequest(TeaModel):
|
|
8791
|
+
def __init__(
|
|
8792
|
+
self,
|
|
8793
|
+
configuration: S3IngestionConfiguration = None,
|
|
8794
|
+
description: str = None,
|
|
8795
|
+
display_name: str = None,
|
|
8796
|
+
name: str = None,
|
|
8797
|
+
schedule: Schedule = None,
|
|
8798
|
+
):
|
|
8799
|
+
self.configuration = configuration
|
|
8800
|
+
self.description = description
|
|
8801
|
+
# This parameter is required.
|
|
8802
|
+
self.display_name = display_name
|
|
8803
|
+
# This parameter is required.
|
|
8804
|
+
self.name = name
|
|
8805
|
+
self.schedule = schedule
|
|
8806
|
+
|
|
8807
|
+
def validate(self):
|
|
8808
|
+
if self.configuration:
|
|
8809
|
+
self.configuration.validate()
|
|
8810
|
+
if self.schedule:
|
|
8811
|
+
self.schedule.validate()
|
|
8812
|
+
|
|
8813
|
+
def to_map(self):
|
|
8814
|
+
_map = super().to_map()
|
|
8815
|
+
if _map is not None:
|
|
8816
|
+
return _map
|
|
8817
|
+
|
|
8818
|
+
result = dict()
|
|
8819
|
+
if self.configuration is not None:
|
|
8820
|
+
result['configuration'] = self.configuration.to_map()
|
|
8821
|
+
if self.description is not None:
|
|
8822
|
+
result['description'] = self.description
|
|
8823
|
+
if self.display_name is not None:
|
|
8824
|
+
result['displayName'] = self.display_name
|
|
8825
|
+
if self.name is not None:
|
|
8826
|
+
result['name'] = self.name
|
|
8827
|
+
if self.schedule is not None:
|
|
8828
|
+
result['schedule'] = self.schedule.to_map()
|
|
8829
|
+
return result
|
|
8830
|
+
|
|
8831
|
+
def from_map(self, m: dict = None):
|
|
8832
|
+
m = m or dict()
|
|
8833
|
+
if m.get('configuration') is not None:
|
|
8834
|
+
temp_model = S3IngestionConfiguration()
|
|
8835
|
+
self.configuration = temp_model.from_map(m['configuration'])
|
|
8836
|
+
if m.get('description') is not None:
|
|
8837
|
+
self.description = m.get('description')
|
|
8838
|
+
if m.get('displayName') is not None:
|
|
8839
|
+
self.display_name = m.get('displayName')
|
|
8840
|
+
if m.get('name') is not None:
|
|
8841
|
+
self.name = m.get('name')
|
|
8842
|
+
if m.get('schedule') is not None:
|
|
8843
|
+
temp_model = Schedule()
|
|
8844
|
+
self.schedule = temp_model.from_map(m['schedule'])
|
|
8845
|
+
return self
|
|
8846
|
+
|
|
8847
|
+
|
|
8848
|
+
class CreateS3IngestionResponse(TeaModel):
|
|
8849
|
+
def __init__(
|
|
8850
|
+
self,
|
|
8851
|
+
headers: Dict[str, str] = None,
|
|
8852
|
+
status_code: int = None,
|
|
8853
|
+
):
|
|
8854
|
+
self.headers = headers
|
|
8855
|
+
self.status_code = status_code
|
|
8856
|
+
|
|
8857
|
+
def validate(self):
|
|
8858
|
+
pass
|
|
8859
|
+
|
|
8860
|
+
def to_map(self):
|
|
8861
|
+
_map = super().to_map()
|
|
8862
|
+
if _map is not None:
|
|
8863
|
+
return _map
|
|
8864
|
+
|
|
8865
|
+
result = dict()
|
|
8866
|
+
if self.headers is not None:
|
|
8867
|
+
result['headers'] = self.headers
|
|
8868
|
+
if self.status_code is not None:
|
|
8869
|
+
result['statusCode'] = self.status_code
|
|
8870
|
+
return result
|
|
8871
|
+
|
|
8872
|
+
def from_map(self, m: dict = None):
|
|
8873
|
+
m = m or dict()
|
|
8874
|
+
if m.get('headers') is not None:
|
|
8875
|
+
self.headers = m.get('headers')
|
|
8876
|
+
if m.get('statusCode') is not None:
|
|
8877
|
+
self.status_code = m.get('statusCode')
|
|
8878
|
+
return self
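The S3 ingestion models mirror the Azure Blob ones; a brief sketch follows (assumed usage, with placeholder configuration and schedule):

# Sketch: build the new S3 ingestion request.
from alibabacloud_sls20201230 import models as sls_models

s3_request = sls_models.CreateS3IngestionRequest(
    configuration=sls_models.S3IngestionConfiguration(),  # fields defined elsewhere in models.py
    display_name='s3-ingest-demo',   # required
    name='ingest-s3-demo',           # required
    schedule=sls_models.Schedule(),
)
s3_request.validate()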
|
|
8879
|
+
|
|
8880
|
+
|
|
7680
8881
|
class CreateSavedSearchRequest(TeaModel):
|
|
7681
8882
|
def __init__(
|
|
7682
8883
|
self,
|
|
@@ -8360,6 +9561,39 @@ class DeleteAnnotationLabelResponse(TeaModel):
|
|
|
8360
9561
|
return self
|
|
8361
9562
|
|
|
8362
9563
|
|
|
9564
|
+
class DeleteAzureBlobIngestionResponse(TeaModel):
|
|
9565
|
+
def __init__(
|
|
9566
|
+
self,
|
|
9567
|
+
headers: Dict[str, str] = None,
|
|
9568
|
+
status_code: int = None,
|
|
9569
|
+
):
|
|
9570
|
+
self.headers = headers
|
|
9571
|
+
self.status_code = status_code
|
|
9572
|
+
|
|
9573
|
+
def validate(self):
|
|
9574
|
+
pass
|
|
9575
|
+
|
|
9576
|
+
def to_map(self):
|
|
9577
|
+
_map = super().to_map()
|
|
9578
|
+
if _map is not None:
|
|
9579
|
+
return _map
|
|
9580
|
+
|
|
9581
|
+
result = dict()
|
|
9582
|
+
if self.headers is not None:
|
|
9583
|
+
result['headers'] = self.headers
|
|
9584
|
+
if self.status_code is not None:
|
|
9585
|
+
result['statusCode'] = self.status_code
|
|
9586
|
+
return result
|
|
9587
|
+
|
|
9588
|
+
def from_map(self, m: dict = None):
|
|
9589
|
+
m = m or dict()
|
|
9590
|
+
if m.get('headers') is not None:
|
|
9591
|
+
self.headers = m.get('headers')
|
|
9592
|
+
if m.get('statusCode') is not None:
|
|
9593
|
+
self.status_code = m.get('statusCode')
|
|
9594
|
+
return self
|
|
9595
|
+
|
|
9596
|
+
|
|
8363
9597
|
class DeleteCollectionPolicyRequest(TeaModel):
|
|
8364
9598
|
def __init__(
|
|
8365
9599
|
self,
|
|
@@ -8657,6 +9891,39 @@ class DeleteETLResponse(TeaModel):
|
|
|
8657
9891
|
return self
|
|
8658
9892
|
|
|
8659
9893
|
|
|
9894
|
+
class DeleteElasticsearchIngestionResponse(TeaModel):
|
|
9895
|
+
def __init__(
|
|
9896
|
+
self,
|
|
9897
|
+
headers: Dict[str, str] = None,
|
|
9898
|
+
status_code: int = None,
|
|
9899
|
+
):
|
|
9900
|
+
self.headers = headers
|
|
9901
|
+
self.status_code = status_code
|
|
9902
|
+
|
|
9903
|
+
def validate(self):
|
|
9904
|
+
pass
|
|
9905
|
+
|
|
9906
|
+
def to_map(self):
|
|
9907
|
+
_map = super().to_map()
|
|
9908
|
+
if _map is not None:
|
|
9909
|
+
return _map
|
|
9910
|
+
|
|
9911
|
+
result = dict()
|
|
9912
|
+
if self.headers is not None:
|
|
9913
|
+
result['headers'] = self.headers
|
|
9914
|
+
if self.status_code is not None:
|
|
9915
|
+
result['statusCode'] = self.status_code
|
|
9916
|
+
return result
|
|
9917
|
+
|
|
9918
|
+
def from_map(self, m: dict = None):
|
|
9919
|
+
m = m or dict()
|
|
9920
|
+
if m.get('headers') is not None:
|
|
9921
|
+
self.headers = m.get('headers')
|
|
9922
|
+
if m.get('statusCode') is not None:
|
|
9923
|
+
self.status_code = m.get('statusCode')
|
|
9924
|
+
return self
|
|
9925
|
+
|
|
9926
|
+
|
|
8660
9927
|
class DeleteIndexResponse(TeaModel):
|
|
8661
9928
|
def __init__(
|
|
8662
9929
|
self,
|
|
@@ -9047,7 +10314,40 @@ class DeleteProjectRequest(TeaModel):
|
|
|
9047
10314
|
return self
|
|
9048
10315
|
|
|
9049
10316
|
|
|
9050
|
-
class DeleteProjectResponse(TeaModel):
|
|
10317
|
+
class DeleteProjectResponse(TeaModel):
|
|
10318
|
+
def __init__(
|
|
10319
|
+
self,
|
|
10320
|
+
headers: Dict[str, str] = None,
|
|
10321
|
+
status_code: int = None,
|
|
10322
|
+
):
|
|
10323
|
+
self.headers = headers
|
|
10324
|
+
self.status_code = status_code
|
|
10325
|
+
|
|
10326
|
+
def validate(self):
|
|
10327
|
+
pass
|
|
10328
|
+
|
|
10329
|
+
def to_map(self):
|
|
10330
|
+
_map = super().to_map()
|
|
10331
|
+
if _map is not None:
|
|
10332
|
+
return _map
|
|
10333
|
+
|
|
10334
|
+
result = dict()
|
|
10335
|
+
if self.headers is not None:
|
|
10336
|
+
result['headers'] = self.headers
|
|
10337
|
+
if self.status_code is not None:
|
|
10338
|
+
result['statusCode'] = self.status_code
|
|
10339
|
+
return result
|
|
10340
|
+
|
|
10341
|
+
def from_map(self, m: dict = None):
|
|
10342
|
+
m = m or dict()
|
|
10343
|
+
if m.get('headers') is not None:
|
|
10344
|
+
self.headers = m.get('headers')
|
|
10345
|
+
if m.get('statusCode') is not None:
|
|
10346
|
+
self.status_code = m.get('statusCode')
|
|
10347
|
+
return self
|
|
10348
|
+
|
|
10349
|
+
|
|
10350
|
+
class DeleteProjectPolicyResponse(TeaModel):
|
|
9051
10351
|
def __init__(
|
|
9052
10352
|
self,
|
|
9053
10353
|
headers: Dict[str, str] = None,
|
|
@@ -9080,7 +10380,7 @@ class DeleteProjectResponse(TeaModel):
|
|
|
9080
10380
|
return self
|
|
9081
10381
|
|
|
9082
10382
|
|
|
9083
|
-
class
|
|
10383
|
+
class DeleteS3IngestionResponse(TeaModel):
|
|
9084
10384
|
def __init__(
|
|
9085
10385
|
self,
|
|
9086
10386
|
headers: Dict[str, str] = None,
|
|
@@ -9249,11 +10549,15 @@ class DescribeRegionsRequest(TeaModel):
|
|
|
9249
10549
|
class DescribeRegionsResponseBodyRegions(TeaModel):
|
|
9250
10550
|
def __init__(
|
|
9251
10551
|
self,
|
|
10552
|
+
data_redundancy_type: List[str] = None,
|
|
10553
|
+
internal_endpoint: str = None,
|
|
9252
10554
|
internet_endpoint: str = None,
|
|
9253
10555
|
intranet_endpoint: str = None,
|
|
9254
10556
|
local_name: str = None,
|
|
9255
10557
|
region: str = None,
|
|
9256
10558
|
):
|
|
10559
|
+
self.data_redundancy_type = data_redundancy_type
|
|
10560
|
+
self.internal_endpoint = internal_endpoint
|
|
9257
10561
|
# The public endpoint of Simple Log Service.
|
|
9258
10562
|
self.internet_endpoint = internet_endpoint
|
|
9259
10563
|
# The internal endpoint of Simple Log Service.
|
|
@@ -9272,6 +10576,10 @@ class DescribeRegionsResponseBodyRegions(TeaModel):
|
|
|
9272
10576
|
return _map
|
|
9273
10577
|
|
|
9274
10578
|
result = dict()
|
|
10579
|
+
if self.data_redundancy_type is not None:
|
|
10580
|
+
result['dataRedundancyType'] = self.data_redundancy_type
|
|
10581
|
+
if self.internal_endpoint is not None:
|
|
10582
|
+
result['internalEndpoint'] = self.internal_endpoint
|
|
9275
10583
|
if self.internet_endpoint is not None:
|
|
9276
10584
|
result['internetEndpoint'] = self.internet_endpoint
|
|
9277
10585
|
if self.intranet_endpoint is not None:
|
|
@@ -9284,6 +10592,10 @@ class DescribeRegionsResponseBodyRegions(TeaModel):
|
|
|
9284
10592
|
|
|
9285
10593
|
def from_map(self, m: dict = None):
|
|
9286
10594
|
m = m or dict()
|
|
10595
|
+
if m.get('dataRedundancyType') is not None:
|
|
10596
|
+
self.data_redundancy_type = m.get('dataRedundancyType')
|
|
10597
|
+
if m.get('internalEndpoint') is not None:
|
|
10598
|
+
self.internal_endpoint = m.get('internalEndpoint')
|
|
9287
10599
|
if m.get('internetEndpoint') is not None:
|
|
9288
10600
|
self.internet_endpoint = m.get('internetEndpoint')
|
|
9289
10601
|
if m.get('intranetEndpoint') is not None:
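A sketch of the two fields added to the region descriptions (assumed usage; the endpoint strings and redundancy values are illustrative examples, not taken from the diff):

# Sketch: the region entry now exposes an internal endpoint and redundancy types.
from alibabacloud_sls20201230 import models as sls_models

region = sls_models.DescribeRegionsResponseBodyRegions().from_map({
    'region': 'cn-hangzhou',
    'internetEndpoint': 'cn-hangzhou.log.aliyuncs.com',
    'internalEndpoint': 'cn-hangzhou-intranet.log.aliyuncs.com',   # new field
    'dataRedundancyType': ['LRS', 'ZRS'],                          # new field; values illustrative
})
print(region.internal_endpoint, region.data_redundancy_type)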
|
|
@@ -9979,6 +11291,47 @@ class GetAppliedMachineGroupsResponse(TeaModel):
|
|
|
9979
11291
|
return self
|
|
9980
11292
|
|
|
9981
11293
|
|
|
11294
|
+
class GetAzureBlobIngestionResponse(TeaModel):
|
|
11295
|
+
def __init__(
|
|
11296
|
+
self,
|
|
11297
|
+
headers: Dict[str, str] = None,
|
|
11298
|
+
status_code: int = None,
|
|
11299
|
+
body: AzureBlobIngestion = None,
|
|
11300
|
+
):
|
|
11301
|
+
self.headers = headers
|
|
11302
|
+
self.status_code = status_code
|
|
11303
|
+
self.body = body
|
|
11304
|
+
|
|
11305
|
+
def validate(self):
|
|
11306
|
+
if self.body:
|
|
11307
|
+
self.body.validate()
|
|
11308
|
+
|
|
11309
|
+
def to_map(self):
|
|
11310
|
+
_map = super().to_map()
|
|
11311
|
+
if _map is not None:
|
|
11312
|
+
return _map
|
|
11313
|
+
|
|
11314
|
+
result = dict()
|
|
11315
|
+
if self.headers is not None:
|
|
11316
|
+
result['headers'] = self.headers
|
|
11317
|
+
if self.status_code is not None:
|
|
11318
|
+
result['statusCode'] = self.status_code
|
|
11319
|
+
if self.body is not None:
|
|
11320
|
+
result['body'] = self.body.to_map()
|
|
11321
|
+
return result
|
|
11322
|
+
|
|
11323
|
+
def from_map(self, m: dict = None):
|
|
11324
|
+
m = m or dict()
|
|
11325
|
+
if m.get('headers') is not None:
|
|
11326
|
+
self.headers = m.get('headers')
|
|
11327
|
+
if m.get('statusCode') is not None:
|
|
11328
|
+
self.status_code = m.get('statusCode')
|
|
11329
|
+
if m.get('body') is not None:
|
|
11330
|
+
temp_model = AzureBlobIngestion()
|
|
11331
|
+
self.body = temp_model.from_map(m['body'])
|
|
11332
|
+
return self
|
|
11333
|
+
|
|
11334
|
+
|
|
9982
11335
|
class GetCheckPointRequest(TeaModel):
|
|
9983
11336
|
def __init__(
|
|
9984
11337
|
self,
|
|
@@ -11305,6 +12658,47 @@ class GetETLResponse(TeaModel):
|
|
|
11305
12658
|
return self
|
|
11306
12659
|
|
|
11307
12660
|
|
|
12661
|
+
class GetElasticsearchIngestionResponse(TeaModel):
|
|
12662
|
+
def __init__(
|
|
12663
|
+
self,
|
|
12664
|
+
headers: Dict[str, str] = None,
|
|
12665
|
+
status_code: int = None,
|
|
12666
|
+
body: ESIngestion = None,
|
|
12667
|
+
):
|
|
12668
|
+
self.headers = headers
|
|
12669
|
+
self.status_code = status_code
|
|
12670
|
+
self.body = body
|
|
12671
|
+
|
|
12672
|
+
def validate(self):
|
|
12673
|
+
if self.body:
|
|
12674
|
+
self.body.validate()
|
|
12675
|
+
|
|
12676
|
+
def to_map(self):
|
|
12677
|
+
_map = super().to_map()
|
|
12678
|
+
if _map is not None:
|
|
12679
|
+
return _map
|
|
12680
|
+
|
|
12681
|
+
result = dict()
|
|
12682
|
+
if self.headers is not None:
|
|
12683
|
+
result['headers'] = self.headers
|
|
12684
|
+
if self.status_code is not None:
|
|
12685
|
+
result['statusCode'] = self.status_code
|
|
12686
|
+
if self.body is not None:
|
|
12687
|
+
result['body'] = self.body.to_map()
|
|
12688
|
+
return result
|
|
12689
|
+
|
|
12690
|
+
def from_map(self, m: dict = None):
|
|
12691
|
+
m = m or dict()
|
|
12692
|
+
if m.get('headers') is not None:
|
|
12693
|
+
self.headers = m.get('headers')
|
|
12694
|
+
if m.get('statusCode') is not None:
|
|
12695
|
+
self.status_code = m.get('statusCode')
|
|
12696
|
+
if m.get('body') is not None:
|
|
12697
|
+
temp_model = ESIngestion()
|
|
12698
|
+
self.body = temp_model.from_map(m['body'])
|
|
12699
|
+
return self
|
|
12700
|
+
|
|
12701
|
+
|
|
11308
12702
|
class GetHistogramsRequest(TeaModel):
|
|
11309
12703
|
def __init__(
|
|
11310
12704
|
self,
|
|
@@ -12708,6 +14102,7 @@ class GetMaxComputeExportResponse(TeaModel):
|
|
|
12708
14102
|
class GetMetricStoreResponseBody(TeaModel):
|
|
12709
14103
|
def __init__(
|
|
12710
14104
|
self,
|
|
14105
|
+
append_meta: bool = None,
|
|
12711
14106
|
auto_split: bool = None,
|
|
12712
14107
|
create_time: int = None,
|
|
12713
14108
|
hot_ttl: int = None,
|
|
@@ -12718,8 +14113,10 @@ class GetMetricStoreResponseBody(TeaModel):
|
|
|
12718
14113
|
mode: str = None,
|
|
12719
14114
|
name: str = None,
|
|
12720
14115
|
shard_count: int = None,
|
|
14116
|
+
sharding_policy: ShardingPolicy = None,
|
|
12721
14117
|
ttl: int = None,
|
|
12722
14118
|
):
|
|
14119
|
+
self.append_meta = append_meta
|
|
12723
14120
|
# Indicates whether the automatic sharding feature is enabled.
|
|
12724
14121
|
self.auto_split = auto_split
|
|
12725
14122
|
# The creation time. The value is a UNIX timestamp.
|
|
@@ -12738,11 +14135,13 @@ class GetMetricStoreResponseBody(TeaModel):
|
|
|
12738
14135
|
self.name = name
|
|
12739
14136
|
# The number of shards.
|
|
12740
14137
|
self.shard_count = shard_count
|
|
14138
|
+
self.sharding_policy = sharding_policy
|
|
12741
14139
|
# The retention period. Unit: days.
|
|
12742
14140
|
self.ttl = ttl
|
|
12743
14141
|
|
|
12744
14142
|
def validate(self):
|
|
12745
|
-
|
|
14143
|
+
if self.sharding_policy:
|
|
14144
|
+
self.sharding_policy.validate()
|
|
12746
14145
|
|
|
12747
14146
|
def to_map(self):
|
|
12748
14147
|
_map = super().to_map()
|
|
@@ -12750,6 +14149,8 @@ class GetMetricStoreResponseBody(TeaModel):
|
|
|
12750
14149
|
return _map
|
|
12751
14150
|
|
|
12752
14151
|
result = dict()
|
|
14152
|
+
if self.append_meta is not None:
|
|
14153
|
+
result['appendMeta'] = self.append_meta
|
|
12753
14154
|
if self.auto_split is not None:
|
|
12754
14155
|
result['autoSplit'] = self.auto_split
|
|
12755
14156
|
if self.create_time is not None:
|
|
@@ -12770,12 +14171,16 @@ class GetMetricStoreResponseBody(TeaModel):
|
|
|
12770
14171
|
result['name'] = self.name
|
|
12771
14172
|
if self.shard_count is not None:
|
|
12772
14173
|
result['shardCount'] = self.shard_count
|
|
14174
|
+
if self.sharding_policy is not None:
|
|
14175
|
+
result['shardingPolicy'] = self.sharding_policy.to_map()
|
|
12773
14176
|
if self.ttl is not None:
|
|
12774
14177
|
result['ttl'] = self.ttl
|
|
12775
14178
|
return result
|
|
12776
14179
|
|
|
12777
14180
|
def from_map(self, m: dict = None):
|
|
12778
14181
|
m = m or dict()
|
|
14182
|
+
if m.get('appendMeta') is not None:
|
|
14183
|
+
self.append_meta = m.get('appendMeta')
|
|
12779
14184
|
if m.get('autoSplit') is not None:
|
|
12780
14185
|
self.auto_split = m.get('autoSplit')
|
|
12781
14186
|
if m.get('createTime') is not None:
|
|
@@ -12796,6 +14201,9 @@ class GetMetricStoreResponseBody(TeaModel):
|
|
|
12796
14201
|
self.name = m.get('name')
|
|
12797
14202
|
if m.get('shardCount') is not None:
|
|
12798
14203
|
self.shard_count = m.get('shardCount')
|
|
14204
|
+
if m.get('shardingPolicy') is not None:
|
|
14205
|
+
temp_model = ShardingPolicy()
|
|
14206
|
+
self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
|
|
12799
14207
|
if m.get('ttl') is not None:
|
|
12800
14208
|
self.ttl = m.get('ttl')
|
|
12801
14209
|
return self
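A sketch showing that the Metricstore detail payload round-trips the same two additions (assumed usage; the shardingPolicy sub-map is left empty because its fields are not shown in this diff):

# Sketch: read appendMeta and shardingPolicy back from a GetMetricStore body.
from alibabacloud_sls20201230 import models as sls_models

metricstore_body = sls_models.GetMetricStoreResponseBody().from_map({
    'name': 'my-metricstore',
    'shardCount': 2,
    'ttl': 30,
    'appendMeta': True,          # new field
    'shardingPolicy': {},        # new field; deserialized into a ShardingPolicy model
})
print(metricstore_body.append_meta, metricstore_body.sharding_policy)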
|
|
@@ -13195,6 +14603,47 @@ class GetProjectPolicyResponse(TeaModel):
|
|
|
13195
14603
|
return self
|
|
13196
14604
|
|
|
13197
14605
|
|
|
14606
|
+
class GetS3IngestionResponse(TeaModel):
|
|
14607
|
+
def __init__(
|
|
14608
|
+
self,
|
|
14609
|
+
headers: Dict[str, str] = None,
|
|
14610
|
+
status_code: int = None,
|
|
14611
|
+
body: S3Ingestion = None,
|
|
14612
|
+
):
|
|
14613
|
+
self.headers = headers
|
|
14614
|
+
self.status_code = status_code
|
|
14615
|
+
self.body = body
|
|
14616
|
+
|
|
14617
|
+
def validate(self):
|
|
14618
|
+
if self.body:
|
|
14619
|
+
self.body.validate()
|
|
14620
|
+
|
|
14621
|
+
def to_map(self):
|
|
14622
|
+
_map = super().to_map()
|
|
14623
|
+
if _map is not None:
|
|
14624
|
+
return _map
|
|
14625
|
+
|
|
14626
|
+
result = dict()
|
|
14627
|
+
if self.headers is not None:
|
|
14628
|
+
result['headers'] = self.headers
|
|
14629
|
+
if self.status_code is not None:
|
|
14630
|
+
result['statusCode'] = self.status_code
|
|
14631
|
+
if self.body is not None:
|
|
14632
|
+
result['body'] = self.body.to_map()
|
|
14633
|
+
return result
|
|
14634
|
+
|
|
14635
|
+
def from_map(self, m: dict = None):
|
|
14636
|
+
m = m or dict()
|
|
14637
|
+
if m.get('headers') is not None:
|
|
14638
|
+
self.headers = m.get('headers')
|
|
14639
|
+
if m.get('statusCode') is not None:
|
|
14640
|
+
self.status_code = m.get('statusCode')
|
|
14641
|
+
if m.get('body') is not None:
|
|
14642
|
+
temp_model = S3Ingestion()
|
|
14643
|
+
self.body = temp_model.from_map(m['body'])
|
|
14644
|
+
return self
|
|
14645
|
+
|
|
14646
|
+
|
|
13198
14647
|
class GetSavedSearchResponse(TeaModel):
|
|
13199
14648
|
def __init__(
|
|
13200
14649
|
self,
|
|
@@ -14005,8 +15454,131 @@ class ListAlertsResponseBody(TeaModel):
|
|
|
14005
15454
|
self.total = total
|
|
14006
15455
|
|
|
14007
15456
|
def validate(self):
|
|
14008
|
-
if self.results:
|
|
14009
|
-
for k in self.results:
|
|
15457
|
+
if self.results:
|
|
15458
|
+
for k in self.results:
|
|
15459
|
+
if k:
|
|
15460
|
+
k.validate()
|
|
15461
|
+
|
|
15462
|
+
def to_map(self):
|
|
15463
|
+
_map = super().to_map()
|
|
15464
|
+
if _map is not None:
|
|
15465
|
+
return _map
|
|
15466
|
+
|
|
15467
|
+
result = dict()
|
|
15468
|
+
if self.count is not None:
|
|
15469
|
+
result['count'] = self.count
|
|
15470
|
+
result['results'] = []
|
|
15471
|
+
if self.results is not None:
|
|
15472
|
+
for k in self.results:
|
|
15473
|
+
result['results'].append(k.to_map() if k else None)
|
|
15474
|
+
if self.total is not None:
|
|
15475
|
+
result['total'] = self.total
|
|
15476
|
+
return result
|
|
15477
|
+
|
|
15478
|
+
def from_map(self, m: dict = None):
|
|
15479
|
+
m = m or dict()
|
|
15480
|
+
if m.get('count') is not None:
|
|
15481
|
+
self.count = m.get('count')
|
|
15482
|
+
self.results = []
|
|
15483
|
+
if m.get('results') is not None:
|
|
15484
|
+
for k in m.get('results'):
|
|
15485
|
+
temp_model = Alert()
|
|
15486
|
+
self.results.append(temp_model.from_map(k))
|
|
15487
|
+
if m.get('total') is not None:
|
|
15488
|
+
self.total = m.get('total')
|
|
15489
|
+
return self
|
|
15490
|
+
|
|
15491
|
+
|
|
15492
|
+
class ListAlertsResponse(TeaModel):
|
|
15493
|
+
def __init__(
|
|
15494
|
+
self,
|
|
15495
|
+
headers: Dict[str, str] = None,
|
|
15496
|
+
status_code: int = None,
|
|
15497
|
+
body: ListAlertsResponseBody = None,
|
|
15498
|
+
):
|
|
15499
|
+
self.headers = headers
|
|
15500
|
+
self.status_code = status_code
|
|
15501
|
+
self.body = body
|
|
15502
|
+
|
|
15503
|
+
def validate(self):
|
|
15504
|
+
if self.body:
|
|
15505
|
+
self.body.validate()
|
|
15506
|
+
|
|
15507
|
+
def to_map(self):
|
|
15508
|
+
_map = super().to_map()
|
|
15509
|
+
if _map is not None:
|
|
15510
|
+
return _map
|
|
15511
|
+
|
|
15512
|
+
result = dict()
|
|
15513
|
+
if self.headers is not None:
|
|
15514
|
+
result['headers'] = self.headers
|
|
15515
|
+
if self.status_code is not None:
|
|
15516
|
+
result['statusCode'] = self.status_code
|
|
15517
|
+
if self.body is not None:
|
|
15518
|
+
result['body'] = self.body.to_map()
|
|
15519
|
+
return result
|
|
15520
|
+
|
|
15521
|
+
def from_map(self, m: dict = None):
|
|
15522
|
+
m = m or dict()
|
|
15523
|
+
if m.get('headers') is not None:
|
|
15524
|
+
self.headers = m.get('headers')
|
|
15525
|
+
if m.get('statusCode') is not None:
|
|
15526
|
+
self.status_code = m.get('statusCode')
|
|
15527
|
+
if m.get('body') is not None:
|
|
15528
|
+
temp_model = ListAlertsResponseBody()
|
|
15529
|
+
self.body = temp_model.from_map(m['body'])
|
|
15530
|
+
return self
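A sketch of consuming the re-emitted alert listing models (assumed usage; the result entry is left empty because the Alert fields are defined elsewhere in models.py):

# Sketch: deserialize a ListAlerts body and walk its Alert results.
from alibabacloud_sls20201230 import models as sls_models

alerts_body = sls_models.ListAlertsResponseBody().from_map({
    'count': 1,
    'total': 1,
    'results': [{}],             # each entry is deserialized into an Alert model
})
for alert in alerts_body.results:
    print(type(alert).__name__, alert.to_map())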
|
|
15531
|
+
|
|
15532
|
+
|
|
15533
|
+
class ListAnnotationDataRequest(TeaModel):
|
|
15534
|
+
def __init__(
|
|
15535
|
+
self,
|
|
15536
|
+
offset: int = None,
|
|
15537
|
+
size: int = None,
|
|
15538
|
+
):
|
|
15539
|
+
# The line from which the query starts.
|
|
15540
|
+
self.offset = offset
|
|
15541
|
+
# The number of entries per page.
|
|
15542
|
+
self.size = size
|
|
15543
|
+
|
|
15544
|
+
def validate(self):
|
|
15545
|
+
pass
|
|
15546
|
+
|
|
15547
|
+
def to_map(self):
|
|
15548
|
+
_map = super().to_map()
|
|
15549
|
+
if _map is not None:
|
|
15550
|
+
return _map
|
|
15551
|
+
|
|
15552
|
+
result = dict()
|
|
15553
|
+
if self.offset is not None:
|
|
15554
|
+
result['offset'] = self.offset
|
|
15555
|
+
if self.size is not None:
|
|
15556
|
+
result['size'] = self.size
|
|
15557
|
+
return result
|
|
15558
|
+
|
|
15559
|
+
def from_map(self, m: dict = None):
|
|
15560
|
+
m = m or dict()
|
|
15561
|
+
if m.get('offset') is not None:
|
|
15562
|
+
self.offset = m.get('offset')
|
|
15563
|
+
if m.get('size') is not None:
|
|
15564
|
+
self.size = m.get('size')
|
|
15565
|
+
return self
|
|
15566
|
+
|
|
15567
|
+
|
|
15568
|
+
class ListAnnotationDataResponseBody(TeaModel):
|
|
15569
|
+
def __init__(
|
|
15570
|
+
self,
|
|
15571
|
+
data: List[MLDataParam] = None,
|
|
15572
|
+
total: int = None,
|
|
15573
|
+
):
|
|
15574
|
+
# The data returned.
|
|
15575
|
+
self.data = data
|
|
15576
|
+
# The total number of entries returned.
|
|
15577
|
+
self.total = total
|
|
15578
|
+
|
|
15579
|
+
def validate(self):
|
|
15580
|
+
if self.data:
|
|
15581
|
+
for k in self.data:
|
|
14010
15582
|
if k:
|
|
14011
15583
|
k.validate()
|
|
14012
15584
|
|
|
@@ -14016,36 +15588,32 @@ class ListAlertsResponseBody(TeaModel):
|
|
|
14016
15588
|
return _map
|
|
14017
15589
|
|
|
14018
15590
|
result = dict()
|
|
14019
|
-
|
|
14020
|
-
|
|
14021
|
-
|
|
14022
|
-
|
|
14023
|
-
for k in self.results:
|
|
14024
|
-
result['results'].append(k.to_map() if k else None)
|
|
15591
|
+
result['data'] = []
|
|
15592
|
+
if self.data is not None:
|
|
15593
|
+
for k in self.data:
|
|
15594
|
+
result['data'].append(k.to_map() if k else None)
|
|
14025
15595
|
if self.total is not None:
|
|
14026
15596
|
result['total'] = self.total
|
|
14027
15597
|
return result
|
|
14028
15598
|
|
|
14029
15599
|
def from_map(self, m: dict = None):
|
|
14030
15600
|
m = m or dict()
|
|
14031
|
-
|
|
14032
|
-
|
|
14033
|
-
|
|
14034
|
-
|
|
14035
|
-
|
|
14036
|
-
temp_model = Alert()
|
|
14037
|
-
self.results.append(temp_model.from_map(k))
|
|
15601
|
+
self.data = []
|
|
15602
|
+
if m.get('data') is not None:
|
|
15603
|
+
for k in m.get('data'):
|
|
15604
|
+
temp_model = MLDataParam()
|
|
15605
|
+
self.data.append(temp_model.from_map(k))
|
|
14038
15606
|
if m.get('total') is not None:
|
|
14039
15607
|
self.total = m.get('total')
|
|
14040
15608
|
return self
|
|
14041
15609
|
|
|
14042
15610
|
|
|
14043
|
-
class
|
|
15611
|
+
class ListAnnotationDataResponse(TeaModel):
|
|
14044
15612
|
def __init__(
|
|
14045
15613
|
self,
|
|
14046
15614
|
headers: Dict[str, str] = None,
|
|
14047
15615
|
status_code: int = None,
|
|
14048
|
-
body:
|
|
15616
|
+
body: ListAnnotationDataResponseBody = None,
|
|
14049
15617
|
):
|
|
14050
15618
|
self.headers = headers
|
|
14051
15619
|
self.status_code = status_code
|
|
@@ -14076,12 +15644,12 @@ class ListAlertsResponse(TeaModel):
|
|
|
14076
15644
|
if m.get('statusCode') is not None:
|
|
14077
15645
|
self.status_code = m.get('statusCode')
|
|
14078
15646
|
if m.get('body') is not None:
|
|
14079
|
-
temp_model =
|
|
15647
|
+
temp_model = ListAnnotationDataResponseBody()
|
|
14080
15648
|
self.body = temp_model.from_map(m['body'])
|
|
14081
15649
|
return self
|
|
14082
15650
|
|
|
14083
15651
|
|
|
14084
|
-
class
|
|
15652
|
+
class ListAnnotationDataSetsRequest(TeaModel):
|
|
14085
15653
|
def __init__(
|
|
14086
15654
|
self,
|
|
14087
15655
|
offset: int = None,
|
|
@@ -14116,10 +15684,10 @@ class ListAnnotationDataRequest(TeaModel):
|
|
|
14116
15684
|
return self
|
|
14117
15685
|
|
|
14118
15686
|
|
|
14119
|
-
class
|
|
15687
|
+
class ListAnnotationDataSetsResponseBody(TeaModel):
|
|
14120
15688
|
def __init__(
|
|
14121
15689
|
self,
|
|
14122
|
-
data: List[
|
|
15690
|
+
data: List[MLDataSetParam] = None,
|
|
14123
15691
|
total: int = None,
|
|
14124
15692
|
):
|
|
14125
15693
|
# The data returned.
|
|
@@ -14152,19 +15720,19 @@ class ListAnnotationDataResponseBody(TeaModel):
|
|
|
14152
15720
|
self.data = []
|
|
14153
15721
|
if m.get('data') is not None:
|
|
14154
15722
|
for k in m.get('data'):
|
|
14155
|
-
temp_model =
|
|
15723
|
+
temp_model = MLDataSetParam()
|
|
14156
15724
|
self.data.append(temp_model.from_map(k))
|
|
14157
15725
|
if m.get('total') is not None:
|
|
14158
15726
|
self.total = m.get('total')
|
|
14159
15727
|
return self
|
|
14160
15728
|
|
|
14161
15729
|
|
|
14162
|
-
class
|
|
15730
|
+
class ListAnnotationDataSetsResponse(TeaModel):
|
|
14163
15731
|
def __init__(
|
|
14164
15732
|
self,
|
|
14165
15733
|
headers: Dict[str, str] = None,
|
|
14166
15734
|
status_code: int = None,
|
|
14167
|
-
body:
|
|
15735
|
+
body: ListAnnotationDataSetsResponseBody = None,
|
|
14168
15736
|
):
|
|
14169
15737
|
self.headers = headers
|
|
14170
15738
|
self.status_code = status_code
|
|
@@ -14195,12 +15763,12 @@ class ListAnnotationDataResponse(TeaModel):
|
|
|
14195
15763
|
if m.get('statusCode') is not None:
|
|
14196
15764
|
self.status_code = m.get('statusCode')
|
|
14197
15765
|
if m.get('body') is not None:
|
|
14198
|
-
temp_model =
|
|
15766
|
+
temp_model = ListAnnotationDataSetsResponseBody()
|
|
14199
15767
|
self.body = temp_model.from_map(m['body'])
|
|
14200
15768
|
return self
|
|
14201
15769
|
|
|
14202
15770
|
|
|
14203
|
-
class
|
|
15771
|
+
class ListAnnotationLabelsRequest(TeaModel):
|
|
14204
15772
|
def __init__(
|
|
14205
15773
|
self,
|
|
14206
15774
|
offset: int = None,
|
|
@@ -14235,15 +15803,15 @@ class ListAnnotationDataSetsRequest(TeaModel):
|
|
|
14235
15803
|
return self
|
|
14236
15804
|
|
|
14237
15805
|
|
|
14238
|
-
class
|
|
15806
|
+
class ListAnnotationLabelsResponseBody(TeaModel):
|
|
14239
15807
|
def __init__(
|
|
14240
15808
|
self,
|
|
14241
|
-
data: List[
|
|
15809
|
+
data: List[MLLabelParam] = None,
|
|
14242
15810
|
total: int = None,
|
|
14243
15811
|
):
|
|
14244
15812
|
# The data returned.
|
|
14245
15813
|
self.data = data
|
|
14246
|
-
# The total number of
|
|
15814
|
+
# The total number of tags that meet the query conditions.
|
|
14247
15815
|
self.total = total
|
|
14248
15816
|
|
|
14249
15817
|
def validate(self):
|
|
@@ -14271,19 +15839,19 @@ class ListAnnotationDataSetsResponseBody(TeaModel):
|
|
|
14271
15839
|
self.data = []
|
|
14272
15840
|
if m.get('data') is not None:
|
|
14273
15841
|
for k in m.get('data'):
|
|
14274
|
-
temp_model =
|
|
15842
|
+
temp_model = MLLabelParam()
|
|
14275
15843
|
self.data.append(temp_model.from_map(k))
|
|
14276
15844
|
if m.get('total') is not None:
|
|
14277
15845
|
self.total = m.get('total')
|
|
14278
15846
|
return self
|
|
14279
15847
|
|
|
14280
15848
|
|
|
14281
|
-
class
|
|
15849
|
+
class ListAnnotationLabelsResponse(TeaModel):
|
|
14282
15850
|
def __init__(
|
|
14283
15851
|
self,
|
|
14284
15852
|
headers: Dict[str, str] = None,
|
|
14285
15853
|
status_code: int = None,
|
|
14286
|
-
body:
|
|
15854
|
+
body: ListAnnotationLabelsResponseBody = None,
|
|
14287
15855
|
):
|
|
14288
15856
|
self.headers = headers
|
|
14289
15857
|
self.status_code = status_code
|
|
@@ -14314,20 +15882,20 @@ class ListAnnotationDataSetsResponse(TeaModel):
|
|
|
14314
15882
|
if m.get('statusCode') is not None:
|
|
14315
15883
|
self.status_code = m.get('statusCode')
|
|
14316
15884
|
if m.get('body') is not None:
|
|
14317
|
-
temp_model =
|
|
15885
|
+
temp_model = ListAnnotationLabelsResponseBody()
|
|
14318
15886
|
self.body = temp_model.from_map(m['body'])
|
|
14319
15887
|
return self
|
|
14320
15888
|
|
|
14321
15889
|
|
|
14322
|
-
class
|
|
15890
|
+
class ListAzureBlobIngestionRequest(TeaModel):
|
|
14323
15891
|
def __init__(
|
|
14324
15892
|
self,
|
|
15893
|
+
logstore: str = None,
|
|
14325
15894
|
offset: int = None,
|
|
14326
15895
|
size: int = None,
|
|
14327
15896
|
):
|
|
14328
|
-
|
|
15897
|
+
self.logstore = logstore
|
|
14329
15898
|
self.offset = offset
|
|
14330
|
-
# The number of entries per page.
|
|
14331
15899
|
self.size = size
|
|
14332
15900
|
|
|
14333
15901
|
def validate(self):
|
|
@@ -14339,6 +15907,8 @@ class ListAnnotationLabelsRequest(TeaModel):
|
|
|
14339
15907
|
return _map
|
|
14340
15908
|
|
|
14341
15909
|
result = dict()
|
|
15910
|
+
if self.logstore is not None:
|
|
15911
|
+
result['logstore'] = self.logstore
|
|
14342
15912
|
if self.offset is not None:
|
|
14343
15913
|
result['offset'] = self.offset
|
|
14344
15914
|
if self.size is not None:
|
|
@@ -14347,6 +15917,8 @@ class ListAnnotationLabelsRequest(TeaModel):
|
|
|
14347
15917
|
|
|
14348
15918
|
def from_map(self, m: dict = None):
|
|
14349
15919
|
m = m or dict()
|
|
15920
|
+
if m.get('logstore') is not None:
|
|
15921
|
+
self.logstore = m.get('logstore')
|
|
14350
15922
|
if m.get('offset') is not None:
|
|
14351
15923
|
self.offset = m.get('offset')
|
|
14352
15924
|
if m.get('size') is not None:
|
|
@@ -14354,20 +15926,20 @@ class ListAnnotationLabelsRequest(TeaModel):
|
|
|
14354
15926
|
return self
|
|
14355
15927
|
|
|
14356
15928
|
|
|
14357
|
-
class
|
|
15929
|
+
class ListAzureBlobIngestionResponseBody(TeaModel):
|
|
14358
15930
|
def __init__(
|
|
14359
15931
|
self,
|
|
14360
|
-
|
|
15932
|
+
count: int = None,
|
|
15933
|
+
results: List[AzureBlobIngestion] = None,
|
|
14361
15934
|
total: int = None,
|
|
14362
15935
|
):
|
|
14363
|
-
|
|
14364
|
-
self.
|
|
14365
|
-
# The total number of tags that meet the query conditions.
|
|
15936
|
+
self.count = count
|
|
15937
|
+
self.results = results
|
|
14366
15938
|
self.total = total
|
|
14367
15939
|
|
|
14368
15940
|
def validate(self):
|
|
14369
|
-
if self.
|
|
14370
|
-
for k in self.
|
|
15941
|
+
if self.results:
|
|
15942
|
+
for k in self.results:
|
|
14371
15943
|
if k:
|
|
14372
15944
|
k.validate()
|
|
14373
15945
|
|
|
@@ -14377,32 +15949,36 @@ class ListAnnotationLabelsResponseBody(TeaModel):
|
|
|
14377
15949
|
return _map
|
|
14378
15950
|
|
|
14379
15951
|
result = dict()
|
|
14380
|
-
|
|
14381
|
-
|
|
14382
|
-
|
|
14383
|
-
|
|
15952
|
+
if self.count is not None:
|
|
15953
|
+
result['count'] = self.count
|
|
15954
|
+
result['results'] = []
|
|
15955
|
+
if self.results is not None:
|
|
15956
|
+
for k in self.results:
|
|
15957
|
+
result['results'].append(k.to_map() if k else None)
|
|
14384
15958
|
if self.total is not None:
|
|
14385
15959
|
result['total'] = self.total
|
|
14386
15960
|
return result
|
|
14387
15961
|
|
|
14388
15962
|
def from_map(self, m: dict = None):
|
|
14389
15963
|
m = m or dict()
|
|
14390
|
-
|
|
14391
|
-
|
|
14392
|
-
|
|
14393
|
-
|
|
14394
|
-
|
|
15964
|
+
if m.get('count') is not None:
|
|
15965
|
+
self.count = m.get('count')
|
|
15966
|
+
self.results = []
|
|
15967
|
+
if m.get('results') is not None:
|
|
15968
|
+
for k in m.get('results'):
|
|
15969
|
+
temp_model = AzureBlobIngestion()
|
|
15970
|
+
self.results.append(temp_model.from_map(k))
|
|
14395
15971
|
if m.get('total') is not None:
|
|
14396
15972
|
self.total = m.get('total')
|
|
14397
15973
|
return self
|
|
14398
15974
|
|
|
14399
15975
|
|
|
14400
|
-
class
|
|
15976
|
+
class ListAzureBlobIngestionResponse(TeaModel):
|
|
14401
15977
|
def __init__(
|
|
14402
15978
|
self,
|
|
14403
15979
|
headers: Dict[str, str] = None,
|
|
14404
15980
|
status_code: int = None,
|
|
14405
|
-
body:
|
|
15981
|
+
body: ListAzureBlobIngestionResponseBody = None,
|
|
14406
15982
|
):
|
|
14407
15983
|
self.headers = headers
|
|
14408
15984
|
self.status_code = status_code
|
|
@@ -14433,7 +16009,7 @@ class ListAnnotationLabelsResponse(TeaModel):
|
|
|
14433
16009
|
if m.get('statusCode') is not None:
|
|
14434
16010
|
self.status_code = m.get('statusCode')
|
|
14435
16011
|
if m.get('body') is not None:
|
|
14436
|
-
temp_model =
|
|
16012
|
+
temp_model = ListAzureBlobIngestionResponseBody()
|
|
14437
16013
|
self.body = temp_model.from_map(m['body'])
|
|
14438
16014
|
return self
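A sketch of the Azure Blob ingestion listing (assumed usage): the request gains a logstore filter, and the body exposes count/results/total with AzureBlobIngestion entries.

# Sketch: page through Azure Blob ingestion jobs for one Logstore.
from alibabacloud_sls20201230 import models as sls_models

list_request = sls_models.ListAzureBlobIngestionRequest(logstore='my-logstore', offset=0, size=100)
listing_body = sls_models.ListAzureBlobIngestionResponseBody().from_map({
    'count': 0,
    'total': 0,
    'results': [],               # entries would be deserialized into AzureBlobIngestion models
})
print(list_request.to_map(), listing_body.count, listing_body.total)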
|
|
14439
16015
|
|
|
@@ -15904,36 +17480,182 @@ class ListDownloadJobsResponseBodyResultsExecutionDetails(TeaModel):
|
|
|
15904
17480
|
return self
|
|
15905
17481
|
|
|
15906
17482
|
|
|
15907
|
-
class ListDownloadJobsResponseBodyResults(TeaModel):
|
|
17483
|
+
class ListDownloadJobsResponseBodyResults(TeaModel):
|
|
17484
|
+
def __init__(
|
|
17485
|
+
self,
|
|
17486
|
+
configuration: ListDownloadJobsResponseBodyResultsConfiguration = None,
|
|
17487
|
+
create_time: str = None,
|
|
17488
|
+
description: str = None,
|
|
17489
|
+
display_name: str = None,
|
|
17490
|
+
execution_details: ListDownloadJobsResponseBodyResultsExecutionDetails = None,
|
|
17491
|
+
name: str = None,
|
|
17492
|
+
status: str = None,
|
|
17493
|
+
):
|
|
17494
|
+
# The download configuration.
|
|
17495
|
+
self.configuration = configuration
|
|
17496
|
+
self.create_time = create_time
|
|
17497
|
+
# The description of the job.
|
|
17498
|
+
self.description = description
|
|
17499
|
+
# The display name of the job.
|
|
17500
|
+
self.display_name = display_name
|
|
17501
|
+
# The execution details.
|
|
17502
|
+
self.execution_details = execution_details
|
|
17503
|
+
# The resource attribute field that represents the resource name.
|
|
17504
|
+
self.name = name
|
|
17505
|
+
# The task status.
|
|
17506
|
+
self.status = status
|
|
17507
|
+
|
|
17508
|
+
def validate(self):
|
|
17509
|
+
if self.configuration:
|
|
17510
|
+
self.configuration.validate()
|
|
17511
|
+
if self.execution_details:
|
|
17512
|
+
self.execution_details.validate()
|
|
17513
|
+
|
|
17514
|
+
def to_map(self):
|
|
17515
|
+
_map = super().to_map()
|
|
17516
|
+
if _map is not None:
|
|
17517
|
+
return _map
|
|
17518
|
+
|
|
17519
|
+
result = dict()
|
|
17520
|
+
if self.configuration is not None:
|
|
17521
|
+
result['configuration'] = self.configuration.to_map()
|
|
17522
|
+
if self.create_time is not None:
|
|
17523
|
+
result['createTime'] = self.create_time
|
|
17524
|
+
if self.description is not None:
|
|
17525
|
+
result['description'] = self.description
|
|
17526
|
+
if self.display_name is not None:
|
|
17527
|
+
result['displayName'] = self.display_name
|
|
17528
|
+
if self.execution_details is not None:
|
|
17529
|
+
result['executionDetails'] = self.execution_details.to_map()
|
|
17530
|
+
if self.name is not None:
|
|
17531
|
+
result['name'] = self.name
|
|
17532
|
+
if self.status is not None:
|
|
17533
|
+
result['status'] = self.status
|
|
17534
|
+
return result
|
|
17535
|
+
|
|
17536
|
+
def from_map(self, m: dict = None):
|
|
17537
|
+
m = m or dict()
|
|
17538
|
+
if m.get('configuration') is not None:
|
|
17539
|
+
temp_model = ListDownloadJobsResponseBodyResultsConfiguration()
|
|
17540
|
+
self.configuration = temp_model.from_map(m['configuration'])
|
|
17541
|
+
if m.get('createTime') is not None:
|
|
17542
|
+
self.create_time = m.get('createTime')
|
|
17543
|
+
if m.get('description') is not None:
|
|
17544
|
+
self.description = m.get('description')
|
|
17545
|
+
if m.get('displayName') is not None:
|
|
17546
|
+
self.display_name = m.get('displayName')
|
|
17547
|
+
if m.get('executionDetails') is not None:
|
|
17548
|
+
temp_model = ListDownloadJobsResponseBodyResultsExecutionDetails()
|
|
17549
|
+
self.execution_details = temp_model.from_map(m['executionDetails'])
|
|
17550
|
+
if m.get('name') is not None:
|
|
17551
|
+
self.name = m.get('name')
|
|
17552
|
+
if m.get('status') is not None:
|
|
17553
|
+
self.status = m.get('status')
|
|
17554
|
+
return self
|
|
17555
|
+
|
|
17556
|
+
|
|
17557
|
+
class ListDownloadJobsResponseBody(TeaModel):
|
|
17558
|
+
def __init__(
|
|
17559
|
+
self,
|
|
17560
|
+
count: int = None,
|
|
17561
|
+
results: List[ListDownloadJobsResponseBodyResults] = None,
|
|
17562
|
+
total: int = None,
|
|
17563
|
+
):
|
|
17564
|
+
self.count = count
|
|
17565
|
+
# The list of log download jobs that are returned.
|
|
17566
|
+
self.results = results
|
|
17567
|
+
self.total = total
|
|
17568
|
+
|
|
17569
|
+
def validate(self):
|
|
17570
|
+
if self.results:
|
|
17571
|
+
for k in self.results:
|
|
17572
|
+
if k:
|
|
17573
|
+
k.validate()
|
|
17574
|
+
|
|
17575
|
+
def to_map(self):
|
|
17576
|
+
_map = super().to_map()
|
|
17577
|
+
if _map is not None:
|
|
17578
|
+
return _map
|
|
17579
|
+
|
|
17580
|
+
result = dict()
|
|
17581
|
+
if self.count is not None:
|
|
17582
|
+
result['count'] = self.count
|
|
17583
|
+
result['results'] = []
|
|
17584
|
+
if self.results is not None:
|
|
17585
|
+
for k in self.results:
|
|
17586
|
+
result['results'].append(k.to_map() if k else None)
|
|
17587
|
+
if self.total is not None:
|
|
17588
|
+
result['total'] = self.total
|
|
17589
|
+
return result
|
|
17590
|
+
|
|
17591
|
+
def from_map(self, m: dict = None):
|
|
17592
|
+
m = m or dict()
|
|
17593
|
+
if m.get('count') is not None:
|
|
17594
|
+
self.count = m.get('count')
|
|
17595
|
+
self.results = []
|
|
17596
|
+
if m.get('results') is not None:
|
|
17597
|
+
for k in m.get('results'):
|
|
17598
|
+
temp_model = ListDownloadJobsResponseBodyResults()
|
|
17599
|
+
self.results.append(temp_model.from_map(k))
|
|
17600
|
+
if m.get('total') is not None:
|
|
17601
|
+
self.total = m.get('total')
|
|
17602
|
+
return self
|
|
17603
|
+
|
|
17604
|
+
|
|
17605
|
+
class ListDownloadJobsResponse(TeaModel):
|
|
17606
|
+
def __init__(
|
|
17607
|
+
self,
|
|
17608
|
+
headers: Dict[str, str] = None,
|
|
17609
|
+
status_code: int = None,
|
|
17610
|
+
body: ListDownloadJobsResponseBody = None,
|
|
17611
|
+
):
|
|
17612
|
+
self.headers = headers
|
|
17613
|
+
self.status_code = status_code
|
|
17614
|
+
self.body = body
|
|
17615
|
+
|
|
17616
|
+
def validate(self):
|
|
17617
|
+
if self.body:
|
|
17618
|
+
self.body.validate()
|
|
17619
|
+
|
|
17620
|
+
def to_map(self):
|
|
17621
|
+
_map = super().to_map()
|
|
17622
|
+
if _map is not None:
|
|
17623
|
+
return _map
|
|
17624
|
+
|
|
17625
|
+
result = dict()
|
|
17626
|
+
if self.headers is not None:
|
|
17627
|
+
result['headers'] = self.headers
|
|
17628
|
+
if self.status_code is not None:
|
|
17629
|
+
result['statusCode'] = self.status_code
|
|
17630
|
+
if self.body is not None:
|
|
17631
|
+
result['body'] = self.body.to_map()
|
|
17632
|
+
return result
|
|
17633
|
+
|
|
17634
|
+
def from_map(self, m: dict = None):
|
|
17635
|
+
m = m or dict()
|
|
17636
|
+
if m.get('headers') is not None:
|
|
17637
|
+
self.headers = m.get('headers')
|
|
17638
|
+
if m.get('statusCode') is not None:
|
|
17639
|
+
self.status_code = m.get('statusCode')
|
|
17640
|
+
if m.get('body') is not None:
|
|
17641
|
+
temp_model = ListDownloadJobsResponseBody()
|
|
17642
|
+
self.body = temp_model.from_map(m['body'])
|
|
17643
|
+
return self
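A sketch of consuming the download-job listing added above (assumed usage; the status and timestamp values are illustrative):

# Sketch: walk the download jobs returned in a ListDownloadJobs body.
from alibabacloud_sls20201230 import models as sls_models

jobs_body = sls_models.ListDownloadJobsResponseBody().from_map({
    'count': 1,
    'total': 1,
    'results': [{
        'name': 'download-job-demo',
        'displayName': 'export-demo',
        'status': 'SUCCEEDED',       # value illustrative
        'createTime': '1700000000',
    }],
})
for job in jobs_body.results:
    print(job.name, job.status, job.create_time)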
|
|
17644
|
+
|
|
17645
|
+
|
|
17646
|
+
class ListETLsRequest(TeaModel):
|
|
15908
17647
|
def __init__(
|
|
15909
17648
|
self,
|
|
15910
|
-
|
|
15911
|
-
|
|
15912
|
-
|
|
15913
|
-
display_name: str = None,
|
|
15914
|
-
execution_details: ListDownloadJobsResponseBodyResultsExecutionDetails = None,
|
|
15915
|
-
name: str = None,
|
|
15916
|
-
status: str = None,
|
|
17649
|
+
logstore: str = None,
|
|
17650
|
+
offset: int = None,
|
|
17651
|
+
size: int = None,
|
|
15917
17652
|
):
|
|
15918
|
-
|
|
15919
|
-
self.
|
|
15920
|
-
self.
|
|
15921
|
-
# 任务描述
|
|
15922
|
-
self.description = description
|
|
15923
|
-
# 任务显示名称
|
|
15924
|
-
self.display_name = display_name
|
|
15925
|
-
# The execution details.
|
|
15926
|
-
self.execution_details = execution_details
|
|
15927
|
-
# 代表资源名称的资源属性字段
|
|
15928
|
-
self.name = name
|
|
15929
|
-
# The task status.
|
|
15930
|
-
self.status = status
|
|
17653
|
+
self.logstore = logstore
|
|
17654
|
+
self.offset = offset
|
|
17655
|
+
self.size = size
|
|
15931
17656
|
|
|
15932
17657
|
def validate(self):
|
|
15933
|
-
|
|
15934
|
-
self.configuration.validate()
|
|
15935
|
-
if self.execution_details:
|
|
15936
|
-
self.execution_details.validate()
|
|
17658
|
+
pass
|
|
15937
17659
|
|
|
15938
17660
|
def to_map(self):
|
|
15939
17661
|
_map = super().to_map()
|
|
@@ -15941,53 +17663,36 @@ class ListDownloadJobsResponseBodyResults(TeaModel):
             return _map
 
         result = dict()
-        if self.
-            result['
-        if self.
-            result['
-        if self.
-            result['
-        if self.display_name is not None:
-            result['displayName'] = self.display_name
-        if self.execution_details is not None:
-            result['executionDetails'] = self.execution_details.to_map()
-        if self.name is not None:
-            result['name'] = self.name
-        if self.status is not None:
-            result['status'] = self.status
+        if self.logstore is not None:
+            result['logstore'] = self.logstore
+        if self.offset is not None:
+            result['offset'] = self.offset
+        if self.size is not None:
+            result['size'] = self.size
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('
-
-
-
-
-
-            self.description = m.get('description')
-        if m.get('displayName') is not None:
-            self.display_name = m.get('displayName')
-        if m.get('executionDetails') is not None:
-            temp_model = ListDownloadJobsResponseBodyResultsExecutionDetails()
-            self.execution_details = temp_model.from_map(m['executionDetails'])
-        if m.get('name') is not None:
-            self.name = m.get('name')
-        if m.get('status') is not None:
-            self.status = m.get('status')
+        if m.get('logstore') is not None:
+            self.logstore = m.get('logstore')
+        if m.get('offset') is not None:
+            self.offset = m.get('offset')
+        if m.get('size') is not None:
+            self.size = m.get('size')
         return self
 
 
-class
+class ListETLsResponseBody(TeaModel):
     def __init__(
         self,
         count: int = None,
-        results: List[
+        results: List[ETL] = None,
         total: int = None,
     ):
+        # The number of data transformation jobs that are returned.
         self.count = count
-        # Array, to return a list of log download tasks.
         self.results = results
+        # The total number of data transformation jobs in the project.
         self.total = total
 
     def validate(self):
@@ -16019,19 +17724,19 @@ class ListDownloadJobsResponseBody(TeaModel):
         self.results = []
         if m.get('results') is not None:
             for k in m.get('results'):
-                temp_model =
+                temp_model = ETL()
                 self.results.append(temp_model.from_map(k))
         if m.get('total') is not None:
             self.total = m.get('total')
         return self
 
 
-class
+class ListETLsResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
         status_code: int = None,
-        body:
+        body: ListETLsResponseBody = None,
     ):
         self.headers = headers
         self.status_code = status_code
@@ -16062,12 +17767,12 @@ class ListDownloadJobsResponse(TeaModel):
         if m.get('statusCode') is not None:
             self.status_code = m.get('statusCode')
         if m.get('body') is not None:
-            temp_model =
+            temp_model = ListETLsResponseBody()
             self.body = temp_model.from_map(m['body'])
         return self
 
 
-class
+class ListElasticsearchIngestionsRequest(TeaModel):
     def __init__(
         self,
         logstore: str = None,
@@ -16106,17 +17811,15 @@ class ListETLsRequest(TeaModel):
         return self
 
 
-class
+class ListElasticsearchIngestionsResponseBody(TeaModel):
     def __init__(
         self,
         count: int = None,
-        results: List[
+        results: List[ESIngestion] = None,
         total: int = None,
     ):
-        # The number of data transformation jobs that are returned.
         self.count = count
         self.results = results
-        # The total number of data transformation jobs in the project.
         self.total = total
 
     def validate(self):
@@ -16148,19 +17851,19 @@ class ListETLsResponseBody(TeaModel):
         self.results = []
         if m.get('results') is not None:
             for k in m.get('results'):
-                temp_model =
+                temp_model = ESIngestion()
                 self.results.append(temp_model.from_map(k))
         if m.get('total') is not None:
             self.total = m.get('total')
         return self
 
 
-class
+class ListElasticsearchIngestionsResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
         status_code: int = None,
-        body:
+        body: ListElasticsearchIngestionsResponseBody = None,
     ):
         self.headers = headers
         self.status_code = status_code
@@ -16191,7 +17894,7 @@ class ListETLsResponse(TeaModel):
         if m.get('statusCode') is not None:
             self.status_code = m.get('statusCode')
         if m.get('body') is not None:
-            temp_model =
+            temp_model = ListElasticsearchIngestionsResponseBody()
             self.body = temp_model.from_map(m['body'])
         return self
 
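The hunks above type the results field of ListETLsResponseBody as List[ETL] and spell out the ListElasticsearchIngestions* models. For orientation, a minimal sketch of a round trip through ListETLsResponseBody; the field names and wire keys come from the diff itself, while the sample payload and the prints are illustrative assumptions only:

from alibabacloud_sls20201230.models import ListETLsResponseBody

# Hypothetical payload shaped like the body this model deserializes.
payload = {'count': 0, 'total': 0, 'results': []}
body = ListETLsResponseBody().from_map(payload)
print(body.count, body.total, body.results)  # 0 0 []
print(body.to_map())                         # {'count': 0, 'results': [], 'total': 0}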
@@ -17416,32 +19119,183 @@ class ListOSSIngestionsRequest(TeaModel):
 
     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('logstore') is not None:
-            self.logstore = m.get('logstore')
+        if m.get('logstore') is not None:
+            self.logstore = m.get('logstore')
+        if m.get('offset') is not None:
+            self.offset = m.get('offset')
+        if m.get('size') is not None:
+            self.size = m.get('size')
+        return self
+
+
+class ListOSSIngestionsResponseBody(TeaModel):
+    def __init__(
+        self,
+        count: int = None,
+        results: List[OSSIngestion] = None,
+        total: int = None,
+    ):
+        # The number of OSS data import jobs that are returned.
+        self.count = count
+        # The OSS data import jobs.
+        self.results = results
+        # The total number of OSS data import jobs in the project.
+        self.total = total
+
+    def validate(self):
+        if self.results:
+            for k in self.results:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.count is not None:
+            result['count'] = self.count
+        result['results'] = []
+        if self.results is not None:
+            for k in self.results:
+                result['results'].append(k.to_map() if k else None)
+        if self.total is not None:
+            result['total'] = self.total
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('count') is not None:
+            self.count = m.get('count')
+        self.results = []
+        if m.get('results') is not None:
+            for k in m.get('results'):
+                temp_model = OSSIngestion()
+                self.results.append(temp_model.from_map(k))
+        if m.get('total') is not None:
+            self.total = m.get('total')
+        return self
+
+
+class ListOSSIngestionsResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: ListOSSIngestionsResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = ListOSSIngestionsResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
+class ListProjectRequest(TeaModel):
+    def __init__(
+        self,
+        description: str = None,
+        fetch_quota: bool = None,
+        offset: int = None,
+        project_name: str = None,
+        resource_group_id: str = None,
+        size: int = None,
+    ):
+        self.description = description
+        self.fetch_quota = fetch_quota
+        # The line from which the query starts. Default value: 0.
+        self.offset = offset
+        # The name of the project.
+        self.project_name = project_name
+        self.resource_group_id = resource_group_id
+        # The number of entries per page. Default value: 100. This operation can return up to 500 projects.
+        self.size = size
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.description is not None:
+            result['description'] = self.description
+        if self.fetch_quota is not None:
+            result['fetchQuota'] = self.fetch_quota
+        if self.offset is not None:
+            result['offset'] = self.offset
+        if self.project_name is not None:
+            result['projectName'] = self.project_name
+        if self.resource_group_id is not None:
+            result['resourceGroupId'] = self.resource_group_id
+        if self.size is not None:
+            result['size'] = self.size
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('fetchQuota') is not None:
+            self.fetch_quota = m.get('fetchQuota')
         if m.get('offset') is not None:
             self.offset = m.get('offset')
+        if m.get('projectName') is not None:
+            self.project_name = m.get('projectName')
+        if m.get('resourceGroupId') is not None:
+            self.resource_group_id = m.get('resourceGroupId')
         if m.get('size') is not None:
             self.size = m.get('size')
         return self
 
 
-class
+class ListProjectResponseBody(TeaModel):
     def __init__(
         self,
         count: int = None,
-
+        projects: List[Project] = None,
         total: int = None,
     ):
-        # The number of
+        # The number of returned projects on the current page.
         self.count = count
-        # The
-        self.
-        # The total number of
+        # The projects that meet the query conditions.
+        self.projects = projects
+        # The total number of projects that meet the query conditions.
         self.total = total
 
     def validate(self):
-        if self.
-            for k in self.
+        if self.projects:
+            for k in self.projects:
                 if k:
                     k.validate()
 
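The ListProjectRequest model spelled out in the hunk above can be exercised as follows. This is a minimal, illustrative sketch: the field names and camelCase wire keys are taken from the diff itself, while the concrete values are placeholders, not from the SLS documentation.

from alibabacloud_sls20201230.models import ListProjectRequest

# Build a paged project query; every constructor argument above defaults to None.
request = ListProjectRequest(
    project_name='my-project',       # placeholder project name
    resource_group_id='rg-example',  # placeholder resource group
    offset=0,
    size=100,
)
# to_map() emits only the fields that were set, using the camelCase keys.
assert request.to_map() == {
    'projectName': 'my-project',
    'resourceGroupId': 'rg-example',
    'offset': 0,
    'size': 100,
}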
@@ -17453,10 +19307,10 @@ class ListOSSIngestionsResponseBody(TeaModel):
         result = dict()
         if self.count is not None:
             result['count'] = self.count
-        result['
-        if self.
-            for k in self.
-                result['
+        result['projects'] = []
+        if self.projects is not None:
+            for k in self.projects:
+                result['projects'].append(k.to_map() if k else None)
         if self.total is not None:
             result['total'] = self.total
         return result
@@ -17465,22 +19319,22 @@ class ListOSSIngestionsResponseBody(TeaModel):
         m = m or dict()
         if m.get('count') is not None:
             self.count = m.get('count')
-        self.
-        if m.get('
-            for k in m.get('
-                temp_model =
-                self.
+        self.projects = []
+        if m.get('projects') is not None:
+            for k in m.get('projects'):
+                temp_model = Project()
+                self.projects.append(temp_model.from_map(k))
         if m.get('total') is not None:
             self.total = m.get('total')
         return self
 
 
-class
+class ListProjectResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
         status_code: int = None,
-        body:
+        body: ListProjectResponseBody = None,
     ):
         self.headers = headers
         self.status_code = status_code
@@ -17511,27 +19365,20 @@ class ListOSSIngestionsResponse(TeaModel):
         if m.get('statusCode') is not None:
             self.status_code = m.get('statusCode')
         if m.get('body') is not None:
-            temp_model =
+            temp_model = ListProjectResponseBody()
             self.body = temp_model.from_map(m['body'])
         return self
 
 
-class
+class ListS3IngestionsRequest(TeaModel):
     def __init__(
         self,
-
-        offset:
-
-        resource_group_id: str = None,
-        size: int = None,
+        logstore: str = None,
+        offset: str = None,
+        size: str = None,
     ):
-        self.
-        # The line from which the query starts. Default value: 0.
+        self.logstore = logstore
         self.offset = offset
-        # The name of the project.
-        self.project_name = project_name
-        self.resource_group_id = resource_group_id
-        # The number of entries per page. Default value: 100. This operation can return up to 500 projects.
         self.size = size
 
     def validate(self):
@@ -17543,50 +19390,39 @@ class ListProjectRequest(TeaModel):
             return _map
 
         result = dict()
-        if self.
-            result['
+        if self.logstore is not None:
+            result['logstore'] = self.logstore
         if self.offset is not None:
             result['offset'] = self.offset
-        if self.project_name is not None:
-            result['projectName'] = self.project_name
-        if self.resource_group_id is not None:
-            result['resourceGroupId'] = self.resource_group_id
         if self.size is not None:
             result['size'] = self.size
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('
-            self.
+        if m.get('logstore') is not None:
+            self.logstore = m.get('logstore')
         if m.get('offset') is not None:
             self.offset = m.get('offset')
-        if m.get('projectName') is not None:
-            self.project_name = m.get('projectName')
-        if m.get('resourceGroupId') is not None:
-            self.resource_group_id = m.get('resourceGroupId')
         if m.get('size') is not None:
             self.size = m.get('size')
         return self
 
 
-class
+class ListS3IngestionsResponseBody(TeaModel):
     def __init__(
         self,
         count: int = None,
-
+        results: List[S3Ingestion] = None,
         total: int = None,
     ):
-        # The number of returned projects on the current page.
         self.count = count
-
-        self.projects = projects
-        # The total number of projects that meet the query conditions.
+        self.results = results
         self.total = total
 
     def validate(self):
-        if self.
-            for k in self.
+        if self.results:
+            for k in self.results:
                 if k:
                     k.validate()
 
@@ -17598,10 +19434,10 @@ class ListProjectResponseBody(TeaModel):
         result = dict()
         if self.count is not None:
             result['count'] = self.count
-        result['
-        if self.
-            for k in self.
-                result['
+        result['results'] = []
+        if self.results is not None:
+            for k in self.results:
+                result['results'].append(k.to_map() if k else None)
         if self.total is not None:
             result['total'] = self.total
         return result
@@ -17610,22 +19446,22 @@ class ListProjectResponseBody(TeaModel):
         m = m or dict()
         if m.get('count') is not None:
             self.count = m.get('count')
-        self.
-        if m.get('
-            for k in m.get('
-                temp_model =
-                self.
+        self.results = []
+        if m.get('results') is not None:
+            for k in m.get('results'):
+                temp_model = S3Ingestion()
+                self.results.append(temp_model.from_map(k))
         if m.get('total') is not None:
             self.total = m.get('total')
         return self
 
 
-class
+class ListS3IngestionsResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
         status_code: int = None,
-        body:
+        body: ListS3IngestionsResponseBody = None,
     ):
         self.headers = headers
         self.status_code = status_code
@@ -17656,7 +19492,7 @@ class ListProjectResponse(TeaModel):
         if m.get('statusCode') is not None:
             self.status_code = m.get('statusCode')
         if m.get('body') is not None:
-            temp_model =
+            temp_model = ListS3IngestionsResponseBody()
             self.body = temp_model.from_map(m['body'])
         return self
 
@@ -19206,25 +21042,178 @@ class RefreshTokenResponseBody(TeaModel):
 
     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('accessToken') is not None:
-            self.access_token = m.get('accessToken')
+        if m.get('accessToken') is not None:
+            self.access_token = m.get('accessToken')
+        return self
+
+
+class RefreshTokenResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: RefreshTokenResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = RefreshTokenResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
+class RemoveConfigFromMachineGroupResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
+class SplitShardRequest(TeaModel):
+    def __init__(
+        self,
+        key: str = None,
+        shard_count: int = None,
+    ):
+        # The position where the shard is split.
+        self.key = key
+        # The number of new shards that are generated after splitting.
+        self.shard_count = shard_count
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.key is not None:
+            result['key'] = self.key
+        if self.shard_count is not None:
+            result['shardCount'] = self.shard_count
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('key') is not None:
+            self.key = m.get('key')
+        if m.get('shardCount') is not None:
+            self.shard_count = m.get('shardCount')
+        return self
+
+
+class SplitShardResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: List[Shard] = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            for k in self.body:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        result['body'] = []
+        if self.body is not None:
+            for k in self.body:
+                result['body'].append(k.to_map() if k else None)
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        self.body = []
+        if m.get('body') is not None:
+            for k in m.get('body'):
+                temp_model = Shard()
+                self.body.append(temp_model.from_map(k))
         return self
 
 
-class
+class StartAzureBlobIngestionResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
         status_code: int = None,
-        body: RefreshTokenResponseBody = None,
     ):
         self.headers = headers
         self.status_code = status_code
-        self.body = body
 
     def validate(self):
-
-        self.body.validate()
+        pass
 
     def to_map(self):
         _map = super().to_map()
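The hunk above also spells out standalone SplitShardRequest and SplitShardResponse models. A minimal round trip through SplitShardRequest as a sketch; the hex split key below is a placeholder value, not taken from the SLS documentation:

from alibabacloud_sls20201230.models import SplitShardRequest

# 'key' is the split position described in the model's comment; the value here is a placeholder.
request = SplitShardRequest(key='ef000000000000000000000000000000', shard_count=2)
wire = request.to_map()                    # {'key': 'ef00...', 'shardCount': 2}
same = SplitShardRequest().from_map(wire)  # rebuild the model from its wire form
assert same.key == request.key and same.shard_count == 2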
@@ -19236,8 +21225,6 @@ class RefreshTokenResponse(TeaModel):
             result['headers'] = self.headers
         if self.status_code is not None:
             result['statusCode'] = self.status_code
-        if self.body is not None:
-            result['body'] = self.body.to_map()
         return result
 
     def from_map(self, m: dict = None):
@@ -19246,13 +21233,10 @@ class RefreshTokenResponse(TeaModel):
             self.headers = m.get('headers')
         if m.get('statusCode') is not None:
             self.status_code = m.get('statusCode')
-        if m.get('body') is not None:
-            temp_model = RefreshTokenResponseBody()
-            self.body = temp_model.from_map(m['body'])
         return self
 
 
-class
+class StartETLResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -19285,16 +21269,14 @@ class RemoveConfigFromMachineGroupResponse(TeaModel):
         return self
 
 
-class
+class StartElasticsearchIngestionResponse(TeaModel):
     def __init__(
         self,
-
-
+        headers: Dict[str, str] = None,
+        status_code: int = None,
     ):
-
-        self.
-        # The number of new shards that are generated after splitting.
-        self.shard_count = shard_count
+        self.headers = headers
+        self.status_code = status_code
 
     def validate(self):
         pass
@@ -19305,37 +21287,32 @@ class SplitShardRequest(TeaModel):
             return _map
 
         result = dict()
-        if self.
-            result['
-        if self.
-            result['
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
-        if m.get('
-            self.
-        if m.get('
-            self.
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
         return self
 
 
-class
+class StartMaxComputeExportResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
         status_code: int = None,
-        body: List[Shard] = None,
     ):
         self.headers = headers
         self.status_code = status_code
-        self.body = body
 
     def validate(self):
-
-            for k in self.body:
-                if k:
-                    k.validate()
+        pass
 
     def to_map(self):
         _map = super().to_map()
@@ -19347,10 +21324,6 @@ class SplitShardResponse(TeaModel):
             result['headers'] = self.headers
         if self.status_code is not None:
             result['statusCode'] = self.status_code
-        result['body'] = []
-        if self.body is not None:
-            for k in self.body:
-                result['body'].append(k.to_map() if k else None)
         return result
 
     def from_map(self, m: dict = None):
@@ -19359,15 +21332,10 @@ class SplitShardResponse(TeaModel):
             self.headers = m.get('headers')
         if m.get('statusCode') is not None:
             self.status_code = m.get('statusCode')
-        self.body = []
-        if m.get('body') is not None:
-            for k in m.get('body'):
-                temp_model = Shard()
-                self.body.append(temp_model.from_map(k))
         return self
 
 
-class
+class StartOSSExportResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -19400,7 +21368,7 @@ class StartETLResponse(TeaModel):
         return self
 
 
-class
+class StartOSSHDFSExportResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -19433,7 +21401,7 @@ class StartMaxComputeExportResponse(TeaModel):
         return self
 
 
-class
+class StartOSSIngestionResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -19466,7 +21434,7 @@ class StartOSSExportResponse(TeaModel):
         return self
 
 
-class
+class StopAzureBlobIngestionResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -19499,7 +21467,7 @@ class StartOSSHDFSExportResponse(TeaModel):
         return self
 
 
-class
+class StopETLResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -19532,7 +21500,7 @@ class StartOSSIngestionResponse(TeaModel):
         return self
 
 
-class
+class StopElasticsearchIngestionResponse(TeaModel):
     def __init__(
         self,
         headers: Dict[str, str] = None,
@@ -20279,6 +22247,91 @@ class UpdateAnnotationLabelResponse(TeaModel):
         return self
 
 
+class UpdateAzureBlobIngestionRequest(TeaModel):
+    def __init__(
+        self,
+        configuration: AzureBlobIngestionConfiguration = None,
+        description: str = None,
+        display_name: str = None,
+        schedule: Schedule = None,
+    ):
+        # This parameter is required.
+        self.configuration = configuration
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        self.schedule = schedule
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = AzureBlobIngestionConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        return self
+
+
+class UpdateAzureBlobIngestionResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class UpdateConfigRequest(TeaModel):
     def __init__(
         self,
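The hunk above adds UpdateAzureBlobIngestionRequest and UpdateAzureBlobIngestionResponse. A minimal sketch of assembling the request, assuming the usual generated-model pattern in which every constructor argument of the nested AzureBlobIngestionConfiguration and Schedule models defaults to None; their own fields are defined elsewhere in models.py and are left empty here as placeholders:

from alibabacloud_sls20201230.models import (
    AzureBlobIngestionConfiguration,
    Schedule,
    UpdateAzureBlobIngestionRequest,
)

# Empty nested models are placeholders; in real use their fields would be populated first.
request = UpdateAzureBlobIngestionRequest(
    configuration=AzureBlobIngestionConfiguration(),  # required per the comment above
    display_name='azure-blob-import',                 # required; placeholder name
    description='sync Azure Blob objects into a Logstore',
    schedule=Schedule(),
)
print(request.to_map())  # nested models are serialized through their own to_map()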
@@ -20592,6 +22645,91 @@ class UpdateETLResponse(TeaModel):
         return self
 
 
+class UpdateElasticsearchIngestionRequest(TeaModel):
+    def __init__(
+        self,
+        configuration: ESIngestionConfiguration = None,
+        description: str = None,
+        display_name: str = None,
+        schedule: Schedule = None,
+    ):
+        # This parameter is required.
+        self.configuration = configuration
+        self.description = description
+        # This parameter is required.
+        self.display_name = display_name
+        self.schedule = schedule
+
+    def validate(self):
+        if self.configuration:
+            self.configuration.validate()
+        if self.schedule:
+            self.schedule.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.configuration is not None:
+            result['configuration'] = self.configuration.to_map()
+        if self.description is not None:
+            result['description'] = self.description
+        if self.display_name is not None:
+            result['displayName'] = self.display_name
+        if self.schedule is not None:
+            result['schedule'] = self.schedule.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('configuration') is not None:
+            temp_model = ESIngestionConfiguration()
+            self.configuration = temp_model.from_map(m['configuration'])
+        if m.get('description') is not None:
+            self.description = m.get('description')
+        if m.get('displayName') is not None:
+            self.display_name = m.get('displayName')
+        if m.get('schedule') is not None:
+            temp_model = Schedule()
+            self.schedule = temp_model.from_map(m['schedule'])
+        return self
+
+
+class UpdateElasticsearchIngestionResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        return self
+
+
 class UpdateIndexRequest(TeaModel):
     def __init__(
         self,
@@ -20668,6 +22806,7 @@ class UpdateLogStoreRequest(TeaModel):
         max_split_shard: int = None,
         mode: str = None,
         shard_count: int = None,
+        sharding_policy: ShardingPolicy = None,
         telemetry_type: str = None,
         ttl: int = None,
     ):
@@ -20709,6 +22848,7 @@ class UpdateLogStoreRequest(TeaModel):
         #
         # > You cannot call the UpdateLogStore operation to change the number of shards. You can call the SplitShard or MergeShards operation to change the number of shards.
         self.shard_count = shard_count
+        self.sharding_policy = sharding_policy
         # The type of the observable data. Valid values:
         #
         # * None (default): log data.
@@ -20722,6 +22862,8 @@ class UpdateLogStoreRequest(TeaModel):
     def validate(self):
         if self.encrypt_conf:
             self.encrypt_conf.validate()
+        if self.sharding_policy:
+            self.sharding_policy.validate()
 
     def to_map(self):
         _map = super().to_map()
@@ -20749,6 +22891,8 @@ class UpdateLogStoreRequest(TeaModel):
             result['mode'] = self.mode
         if self.shard_count is not None:
             result['shardCount'] = self.shard_count
+        if self.sharding_policy is not None:
+            result['shardingPolicy'] = self.sharding_policy.to_map()
         if self.telemetry_type is not None:
             result['telemetryType'] = self.telemetry_type
         if self.ttl is not None:
@@ -20778,6 +22922,9 @@ class UpdateLogStoreRequest(TeaModel):
             self.mode = m.get('mode')
         if m.get('shardCount') is not None:
             self.shard_count = m.get('shardCount')
+        if m.get('shardingPolicy') is not None:
+            temp_model = ShardingPolicy()
+            self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
         if m.get('telemetryType') is not None:
             self.telemetry_type = m.get('telemetryType')
         if m.get('ttl') is not None:
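The UpdateLogStoreRequest hunks above wire an optional sharding_policy sub-model into the constructor, validate(), to_map() and from_map(), alongside the existing encrypt_conf handling. A minimal sketch; an empty ShardingPolicy is used as a placeholder, since its own fields are defined elsewhere in models.py:

from alibabacloud_sls20201230.models import ShardingPolicy, UpdateLogStoreRequest

# Placeholder policy; normally its fields would be populated before the update call.
request = UpdateLogStoreRequest(ttl=30, sharding_policy=ShardingPolicy())
wire = request.to_map()
print('shardingPolicy' in wire, wire.get('ttl'))  # True 30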
@@ -21621,13 +23768,16 @@ class UpdateMaxComputeExportResponse(TeaModel):
 class UpdateMetricStoreRequest(TeaModel):
     def __init__(
         self,
+        append_meta: bool = None,
         auto_split: bool = None,
         hot_ttl: int = None,
         infrequent_access_ttl: int = None,
         max_split_shard: int = None,
         mode: str = None,
+        sharding_policy: ShardingPolicy = None,
         ttl: int = None,
     ):
+        self.append_meta = append_meta
         # Specifies whether to enable automatic sharding.
         self.auto_split = auto_split
         self.hot_ttl = hot_ttl
@@ -21636,11 +23786,13 @@ class UpdateMetricStoreRequest(TeaModel):
         self.max_split_shard = max_split_shard
         # The type of the Metricstore.
         self.mode = mode
+        self.sharding_policy = sharding_policy
         # The retention period of the metric data. Unit: days.
         self.ttl = ttl
 
     def validate(self):
-
+        if self.sharding_policy:
+            self.sharding_policy.validate()
 
     def to_map(self):
         _map = super().to_map()
@@ -21648,6 +23800,8 @@ class UpdateMetricStoreRequest(TeaModel):
             return _map
 
         result = dict()
+        if self.append_meta is not None:
+            result['appendMeta'] = self.append_meta
         if self.auto_split is not None:
             result['autoSplit'] = self.auto_split
         if self.hot_ttl is not None:
@@ -21658,12 +23812,16 @@ class UpdateMetricStoreRequest(TeaModel):
             result['maxSplitShard'] = self.max_split_shard
         if self.mode is not None:
             result['mode'] = self.mode
+        if self.sharding_policy is not None:
+            result['shardingPolicy'] = self.sharding_policy.to_map()
         if self.ttl is not None:
             result['ttl'] = self.ttl
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('appendMeta') is not None:
+            self.append_meta = m.get('appendMeta')
         if m.get('autoSplit') is not None:
             self.auto_split = m.get('autoSplit')
         if m.get('hot_ttl') is not None:
@@ -21674,6 +23832,9 @@ class UpdateMetricStoreRequest(TeaModel):
             self.max_split_shard = m.get('maxSplitShard')
         if m.get('mode') is not None:
             self.mode = m.get('mode')
+        if m.get('shardingPolicy') is not None:
+            temp_model = ShardingPolicy()
+            self.sharding_policy = temp_model.from_map(m['shardingPolicy'])
         if m.get('ttl') is not None:
             self.ttl = m.get('ttl')
         return self
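UpdateMetricStoreRequest picks up the same sharding_policy hook plus a new append_meta flag in the hunks above. A short illustrative sketch of the resulting wire keys; all values are placeholders:

from alibabacloud_sls20201230.models import UpdateMetricStoreRequest

request = UpdateMetricStoreRequest(append_meta=True, auto_split=True, ttl=30)
print(request.to_map())  # {'appendMeta': True, 'autoSplit': True, 'ttl': 30}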